From 355ff0bc201e00856ba20d82c65b14ffa6fcfe4b Mon Sep 17 00:00:00 2001 From: Fabien Fleutot Date: Mon, 13 Jan 2014 19:26:50 +0100 Subject: [PATCH] Merge branch 'master' of ssh://git.eclipse.org/gitroot/koneki/org.eclipse.koneki.metalua --- .gitignore | 24 - INSTALL.TXT | 68 - LICENSE | 45 - Makefile | 24 - README-compiler.md | 104 ++ README-parser.md | 175 +++ README.TXT | 397 ----- README.md | 13 + checks.lua | 59 + junk/README | 1 - junk/combine.c | 51 - junk/dynamatch.mlua | 29 - junk/hygienic.lua | 279 ---- junk/hygienic2.lua | 101 -- junk/maybe.mlua | 33 - junk/notes.txt | 585 -------- junk/todo.txt | 4 - junk/typecheck.mlua | 106 -- metalua-compiler-0.7.2-1.rockspec | 60 + metalua-parser-0.7.2-1.rockspec | 42 + metalua.lua | 274 ++++ metalua/bytecode.lua | 29 + metalua/compiler.lua | 181 +++ .../compiler/ast_to_src.mlua | 220 ++- metalua/compiler/bytecode.lua | 29 + .../compiler/bytecode}/compile.lua | 136 +- .../compiler/bytecode}/lcode.lua | 36 +- .../compiler/bytecode}/ldump.lua | 55 +- .../compiler/bytecode}/lopcodes.lua | 32 +- metalua/compiler/globals.lua | 86 ++ metalua/compiler/parser.lua | 42 + metalua/compiler/parser/annot/generator.lua | 48 + metalua/compiler/parser/annot/grammar.lua | 112 ++ metalua/compiler/parser/common.lua | 27 + metalua/compiler/parser/expr.lua | 206 +++ metalua/compiler/parser/ext.lua | 96 ++ metalua/compiler/parser/lexer.lua | 43 + metalua/compiler/parser/meta.lua | 138 ++ metalua/compiler/parser/misc.lua | 175 +++ metalua/compiler/parser/stat.lua | 279 ++++ metalua/compiler/parser/table.lua | 77 + metalua/dollar.mlua | 31 + metalua/extension/comprehension.mlua | 282 ++++ .../metalua => metalua}/extension/match.mlua | 284 ++-- .../metalua => metalua}/extension/xmatch.mlua | 28 +- .../gg.lua => metalua/grammar/generator.lua | 358 +++-- metalua/grammar/lexer.lua | 672 +++++++++ metalua/loader.lua | 128 ++ metalua/pprint.lua | 295 ++++ metalua/repl.mlua | 108 ++ metalua/treequery.mlua | 467 ++++++ metalua/treequery/walk.mlua | 
257 ++++ src/bin/lua.exe | Bin 45056 -> 0 bytes src/bin/luac.exe | Bin 208896 -> 0 bytes src/build-utils/bootstrap.lua | 82 -- src/build-utils/precompile.lua | 37 - src/compiler/Makefile | 67 - src/compiler/lexer.lua | 510 ------- src/compiler/metalua.mlua | 258 ---- src/compiler/mlc.mlua | 195 --- src/compiler/mlp_expr.lua | 213 --- src/compiler/mlp_ext.lua | 89 -- src/compiler/mlp_lexer.lua | 32 - src/compiler/mlp_meta.lua | 118 -- src/compiler/mlp_misc.lua | 185 --- src/compiler/mlp_stat.lua | 226 --- src/compiler/mlp_table.lua | 92 -- src/lib/errnode.lua | 19 - src/lib/metalua/base.lua | 104 -- src/lib/metalua/clopts.mlua | 204 --- src/lib/metalua/compiler.lua | 3 - src/lib/metalua/dollar.mlua | 24 - src/lib/metalua/extension/H-runtime.mlua | 216 --- src/lib/metalua/extension/H.mlua | 22 - src/lib/metalua/extension/anaphoric.mlua | 54 - src/lib/metalua/extension/clist.mlua | 149 -- src/lib/metalua/extension/continue.mlua | 53 - src/lib/metalua/extension/localin.mlua | 2 - src/lib/metalua/extension/log.mlua | 39 - src/lib/metalua/extension/ternary.mlua | 10 - src/lib/metalua/extension/trycatch.mlua | 189 --- src/lib/metalua/extension/types-runtime.mlua | 159 -- src/lib/metalua/extension/types.mlua | 352 ----- src/lib/metalua/extension/withdo.mlua | 30 - src/lib/metalua/extension/xglobal-runtime.lua | 41 - src/lib/metalua/extension/xglobal.mlua | 20 - src/lib/metalua/extension/xloop.mlua | 100 -- src/lib/metalua/metaloop.mlua | 76 - src/lib/metalua/mlc_xcall.lua | 119 -- src/lib/metalua/package2.lua | 106 -- src/lib/metalua/runtime.lua | 3 - src/lib/metalua/string2.lua | 44 - src/lib/metalua/table2.lua | 380 ----- src/lib/metalua/walk.mlua | 304 ---- src/lib/metalua/walk/bindings.mlua | 43 - src/lib/metalua/walk/id.mlua | 186 --- src/lib/metalua/walk/scope.lua | 54 - src/lib/serialize.lua | 193 --- src/lib/strict.lua | 45 - src/lib/verbose_require.lua | 11 - src/make.bat | 58 - src/make.sh | 128 -- src/samples/clist_test.mlua | 20 - src/samples/h_test.mlua | 60 - 
src/samples/h_test2.mlua | 25 - src/samples/hello_world.mlua | 7 - src/samples/ifexpr.mlua | 142 -- src/samples/lex_switch_test.mlua | 40 - src/samples/match_test.mlua | 86 -- src/samples/metalint/INSTALL.TXT | 22 - src/samples/metalint/LICENCE.TXT | 27 - src/samples/metalint/README.TXT | 159 -- src/samples/metalint/dlua/base.dlua | 203 --- src/samples/metalint/dlua/clopts.dlua | 1 - .../metalint/dlua/metalua/compiler.dlua | 402 ------ src/samples/metalint/dlua/walk.dlua | 3 - src/samples/metalint/dlua/walk/id.dlua | 3 - src/samples/metalint/metalint.dlua | 10 - src/samples/metalint/metalint.mlua | 294 ---- src/samples/synth.mlua | 560 ------- src/samples/trycatch_test.mlua | 107 -- src/samples/types_test.mlua | 19 - src/samples/walk_id_test.mlua | 26 - src/samples/weaver.mlua | 120 -- src/samples/withdo_test.mlua | 13 - src/samples/xglobals_test.mlua | 40 - src/samples/xloop_test.mlua | 4 - src/samples/xmatch_test.mlua | 54 - src/tests/locals-and-stats.mlua | 15 - src/tests/reweave.mlua | 25 - src/tests/reweave/comment.lua | 3 - src/tests/reweave/comment2.lua | 2 - src/tests/reweave/comment_dup.lua | 8 - src/tests/reweave/comments.lua | 8 - src/tests/reweave/dup.lua | 2 - src/tests/reweave/empty.lua | 0 src/tests/reweave/extra_whitespace.lua | 16 - src/tests/reweave/function-index-decl.lua | 2 - src/tests/reweave/if.lua | 1 - src/tests/reweave/index_index.lua | 1 - src/tests/reweave/schema.lua | 1286 ----------------- src/tests/reweave/scope.lua | 3 - src/tests/reweave/str.lua | 2 - src/tests/reweave/ws_simple.lua | 1 - src/tests/run.mlua | 37 - 145 files changed, 5241 insertions(+), 11693 deletions(-) delete mode 100644 .gitignore delete mode 100644 INSTALL.TXT delete mode 100644 LICENSE delete mode 100644 Makefile create mode 100644 README-compiler.md create mode 100644 README-parser.md delete mode 100644 README.TXT create mode 100644 README.md create mode 100644 checks.lua delete mode 100644 junk/README delete mode 100644 junk/combine.c delete mode 100644 
junk/dynamatch.mlua delete mode 100755 junk/hygienic.lua delete mode 100644 junk/hygienic2.lua delete mode 100644 junk/maybe.mlua delete mode 100755 junk/notes.txt delete mode 100644 junk/todo.txt delete mode 100644 junk/typecheck.mlua create mode 100644 metalua-compiler-0.7.2-1.rockspec create mode 100644 metalua-parser-0.7.2-1.rockspec create mode 100644 metalua.lua create mode 100644 metalua/bytecode.lua create mode 100644 metalua/compiler.lua rename src/lib/metalua/ast_to_string.mlua => metalua/compiler/ast_to_src.mlua (78%) create mode 100644 metalua/compiler/bytecode.lua rename {src/compiler => metalua/compiler/bytecode}/compile.lua (93%) rename {src/compiler => metalua/compiler/bytecode}/lcode.lua (97%) rename {src/compiler => metalua/compiler/bytecode}/ldump.lua (92%) rename {src/compiler => metalua/compiler/bytecode}/lopcodes.lua (95%) create mode 100644 metalua/compiler/globals.lua create mode 100644 metalua/compiler/parser.lua create mode 100644 metalua/compiler/parser/annot/generator.lua create mode 100644 metalua/compiler/parser/annot/grammar.lua create mode 100644 metalua/compiler/parser/common.lua create mode 100644 metalua/compiler/parser/expr.lua create mode 100644 metalua/compiler/parser/ext.lua create mode 100644 metalua/compiler/parser/lexer.lua create mode 100644 metalua/compiler/parser/meta.lua create mode 100644 metalua/compiler/parser/misc.lua create mode 100644 metalua/compiler/parser/stat.lua create mode 100644 metalua/compiler/parser/table.lua create mode 100644 metalua/dollar.mlua create mode 100644 metalua/extension/comprehension.mlua rename {src/lib/metalua => metalua}/extension/match.mlua (71%) rename {src/lib/metalua => metalua}/extension/xmatch.mlua (89%) rename src/compiler/gg.lua => metalua/grammar/generator.lua (76%) create mode 100644 metalua/grammar/lexer.lua create mode 100644 metalua/loader.lua create mode 100644 metalua/pprint.lua create mode 100644 metalua/repl.mlua create mode 100755 metalua/treequery.mlua create mode 
100755 metalua/treequery/walk.mlua delete mode 100644 src/bin/lua.exe delete mode 100644 src/bin/luac.exe delete mode 100644 src/build-utils/bootstrap.lua delete mode 100644 src/build-utils/precompile.lua delete mode 100644 src/compiler/Makefile delete mode 100644 src/compiler/lexer.lua delete mode 100644 src/compiler/metalua.mlua delete mode 100644 src/compiler/mlc.mlua delete mode 100644 src/compiler/mlp_expr.lua delete mode 100644 src/compiler/mlp_ext.lua delete mode 100644 src/compiler/mlp_lexer.lua delete mode 100644 src/compiler/mlp_meta.lua delete mode 100644 src/compiler/mlp_misc.lua delete mode 100644 src/compiler/mlp_stat.lua delete mode 100644 src/compiler/mlp_table.lua delete mode 100644 src/lib/errnode.lua delete mode 100644 src/lib/metalua/base.lua delete mode 100644 src/lib/metalua/clopts.mlua delete mode 100644 src/lib/metalua/compiler.lua delete mode 100644 src/lib/metalua/dollar.mlua delete mode 100644 src/lib/metalua/extension/H-runtime.mlua delete mode 100644 src/lib/metalua/extension/H.mlua delete mode 100644 src/lib/metalua/extension/anaphoric.mlua delete mode 100644 src/lib/metalua/extension/clist.mlua delete mode 100644 src/lib/metalua/extension/continue.mlua delete mode 100644 src/lib/metalua/extension/localin.mlua delete mode 100644 src/lib/metalua/extension/log.mlua delete mode 100644 src/lib/metalua/extension/ternary.mlua delete mode 100644 src/lib/metalua/extension/trycatch.mlua delete mode 100644 src/lib/metalua/extension/types-runtime.mlua delete mode 100644 src/lib/metalua/extension/types.mlua delete mode 100644 src/lib/metalua/extension/withdo.mlua delete mode 100644 src/lib/metalua/extension/xglobal-runtime.lua delete mode 100644 src/lib/metalua/extension/xglobal.mlua delete mode 100644 src/lib/metalua/extension/xloop.mlua delete mode 100644 src/lib/metalua/metaloop.mlua delete mode 100644 src/lib/metalua/mlc_xcall.lua delete mode 100644 src/lib/metalua/package2.lua delete mode 100644 src/lib/metalua/runtime.lua delete mode 100644 
src/lib/metalua/string2.lua delete mode 100644 src/lib/metalua/table2.lua delete mode 100644 src/lib/metalua/walk.mlua delete mode 100644 src/lib/metalua/walk/bindings.mlua delete mode 100644 src/lib/metalua/walk/id.mlua delete mode 100644 src/lib/metalua/walk/scope.lua delete mode 100644 src/lib/serialize.lua delete mode 100644 src/lib/strict.lua delete mode 100644 src/lib/verbose_require.lua delete mode 100644 src/make.bat delete mode 100755 src/make.sh delete mode 100644 src/samples/clist_test.mlua delete mode 100644 src/samples/h_test.mlua delete mode 100644 src/samples/h_test2.mlua delete mode 100644 src/samples/hello_world.mlua delete mode 100644 src/samples/ifexpr.mlua delete mode 100644 src/samples/lex_switch_test.mlua delete mode 100644 src/samples/match_test.mlua delete mode 100644 src/samples/metalint/INSTALL.TXT delete mode 100644 src/samples/metalint/LICENCE.TXT delete mode 100644 src/samples/metalint/README.TXT delete mode 100644 src/samples/metalint/dlua/base.dlua delete mode 100644 src/samples/metalint/dlua/clopts.dlua delete mode 100644 src/samples/metalint/dlua/metalua/compiler.dlua delete mode 100644 src/samples/metalint/dlua/walk.dlua delete mode 100644 src/samples/metalint/dlua/walk/id.dlua delete mode 100644 src/samples/metalint/metalint.dlua delete mode 100644 src/samples/metalint/metalint.mlua delete mode 100644 src/samples/synth.mlua delete mode 100644 src/samples/trycatch_test.mlua delete mode 100644 src/samples/types_test.mlua delete mode 100644 src/samples/walk_id_test.mlua delete mode 100644 src/samples/weaver.mlua delete mode 100644 src/samples/withdo_test.mlua delete mode 100644 src/samples/xglobals_test.mlua delete mode 100644 src/samples/xloop_test.mlua delete mode 100755 src/samples/xmatch_test.mlua delete mode 100644 src/tests/locals-and-stats.mlua delete mode 100644 src/tests/reweave.mlua delete mode 100644 src/tests/reweave/comment.lua delete mode 100644 src/tests/reweave/comment2.lua delete mode 100644 
src/tests/reweave/comment_dup.lua delete mode 100644 src/tests/reweave/comments.lua delete mode 100644 src/tests/reweave/dup.lua delete mode 100644 src/tests/reweave/empty.lua delete mode 100644 src/tests/reweave/extra_whitespace.lua delete mode 100644 src/tests/reweave/function-index-decl.lua delete mode 100644 src/tests/reweave/if.lua delete mode 100644 src/tests/reweave/index_index.lua delete mode 100644 src/tests/reweave/schema.lua delete mode 100644 src/tests/reweave/scope.lua delete mode 100644 src/tests/reweave/str.lua delete mode 100644 src/tests/reweave/ws_simple.lua delete mode 100644 src/tests/run.mlua diff --git a/.gitignore b/.gitignore deleted file mode 100644 index ae31c06..0000000 --- a/.gitignore +++ /dev/null @@ -1,24 +0,0 @@ -*.luac -*.o -*.so -*.dylib -*.sh -*.dll -*.exe -*.a -*.log -*~ -.#* -\#* -Thumbs.db -.DS_Store -semantic.cache -src/compiler/metalua -src/lua/luac -src/lua/lua -src/compiler/bootstrap -src/compiler/metalua -distrib -junk -patches -ran diff --git a/INSTALL.TXT b/INSTALL.TXT deleted file mode 100644 index ec443f3..0000000 --- a/INSTALL.TXT +++ /dev/null @@ -1,68 +0,0 @@ -Installation guidelines -======================= - -====================================================================== -TESTING AN INSTALLATION PROCEDURE IS HARD, AND RARELY DONE RIGHT AT -THE FIRST TRY. IF YOU EXPERIENCE INSTALLATION TROUBLES, PLEASE REPORT -THEM, TO AVOID THEM TO FUTURE USERS. mailto:metalua@gmail.com -====================================================================== - -Prerequisites -------------- -- under MS-Windows, ability to type a couple of commands in a DOS command window -- under POSIX OSes, lua and luac executables in your path, in versions >=5.1 - -MS-Windows ----------- - -- get the sources -- cd metalua\src -- edit make.bat to set your variables: - * DISTRIB_BIN: where you want to put executables metalua.bat, lua.exe, luac.exe. - This directory should be referenced in your PATH environment variable. 
- * DISTRIB_LIB: where you want to put your Lua libraries. It shouldn't mess up - an existing Lua libraries directory. This folder should be referrenced in your - LUA_PATH environment variable. -- run make.bat - -Unix ----- - -- get the sources -- cd metalua/src -- set these variables or modify them in make.sh: - * BUILD: a directory in which metalua should be built. - Must be writable by the user. - * INSTALL_BIN: where metalua will be copied - * INSTALL_LIB: where (meta)lua libs will be copied. Should be - referenced in your LUA_PATH. -- run ./make.sh, under your UID -- a ./make-install.sh script should have been generated, run it. If you - want to install it in a directory that doesn't belong to you, you - might want to run it as root. -- if you don't want to edit make.sh, this will do: - DESTDIR=/opt/metalua/git-1 \ - INSTALL_BIN=/usr/local/bin \ - INSTALL_LIB=/usr/local/lib/lua/5.1 \ - ./make.sh - If this run is successful, do: - DESTDIR=/opt/metalua/git-1 \ - INSTALL_BIN=/usr/local/bin \ - INSTALL_LIB=/usr/local/lib/lua/5.1 \ - ./make-install.sh - if no packaging is required, just leave out the "DESTDIR=... \" line. - If you want metalua to reside in the same space as distribution - supplied files in "/usr" instead of "/usr/local", you propably want to - leave out "local/" in the paths above. - -Test drive ----------- -There are some samples in metalua/src/samples, which can be run simply by -typing in the shell `metalua samplename.mlua`. Use `metalua -h` to -have an overview of interesting options. Among them, "-a" dumps the -AST resulting from a compilation: that's the perfect learning tool for -practical-oriented minds. 
- -Once you've played with the samples, and maybe written a couple of -simple programs, I'm afraid the next step will be to RTFM :) - diff --git a/LICENSE b/LICENSE deleted file mode 100644 index e6574fb..0000000 --- a/LICENSE +++ /dev/null @@ -1,45 +0,0 @@ -Metalua - -Copyright (c) 2006-2997 Fabien Fleutot - -Metalua is available under the MIT licence. - -To compile and use Metalua, you need to have installed the following -project, released under the MIT public licence: - -- Lua 5.1 - -Significant parts of the compiler borrow code from the following project, -released under the MIT license: - -- Kein-Hong Man's Yueliang - -Previous versions of Metalua used to embed the following projects, -all under MIT licence (hese dependencies have been removed to turn -Metalua into a pure Lua project, simplifying the port to non-Unix OSes): - -- Tomas Guisasola's Lua Rings -- Ben Sunshine-Hill's Pluto -- Thomas Reuben's Bitlib - -MIT License -=========== - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Makefile b/Makefile deleted file mode 100644 index 94a163a..0000000 --- a/Makefile +++ /dev/null @@ -1,24 +0,0 @@ -# Bootstrapped makefiles are a PITA to get right, so metalua is simply generated by -# a shell script (also available for windows). -# -# To customize installation directories, edit metalua/src/make.{sh,bat} -# - on windows, change DISTRIB_LIB and DISTRIB_BIN -# - on unix, change INSTALL_LIB and INSTALL_BIN -# -# DISTRIB_LIB / INSTALL_LIB can point to an existing Lua library structure, and shouldn't -# mess it up. - - - -# Compile everything in a staging area, by default /tmp/metalua-build. - -all: - cd src && ./make.sh - - - -# src/make-install.sh is generated by src/make.sh, so "make install" won't work unless -# you called "make" first. - -install: - cd src && ./make-install.sh \ No newline at end of file diff --git a/README-compiler.md b/README-compiler.md new file mode 100644 index 0000000..b2679cd --- /dev/null +++ b/README-compiler.md @@ -0,0 +1,104 @@ +Metalua Compiler +================ + +## Metalua compiler + +This module `metalua-compiler` depends on `metalua-parser`. Its main +feature is to compile ASTs into Lua 5.1 bytecode, allowing to convert +them into bytecode files and executable functions. 
This opens the +following possibilities: + +* compiler objects generated with `require 'metalua.compiler'.new()` + support methods `:xxx_to_function()` and `:xxx_to_bytecode()`; + +* Compile-time meta-programming: use of `-{...}` splices in source + code, to generate code during compilation; + +* Some syntax extensions, such as structural pattern matching and + lists by comprehension; + +* Some AST manipulation facilities such as `treequery`, which are + implemented with Metalua syntax extensions. + +## What's new in Metalua 0.7 + +This is a major overhaul of the compiler's architecture. Some of the +most noteworthy changes are: + +* No more installation or bootstrap script. Some Metalua source files + have been rewritten in plain Lua, and module sources have been + refactored, so that if you just drop the `metalua` folder somewhere + in your `LUA_PATH`, it works. + +* The compiler can be cut in two parts: + + * a parser which generates ASTs out of Lua sources, and should be + either portable or easily ported to Lua 5.2; + + * a compiler, which can turn sources and AST into executable + Lua 5.1 bytecode and run it. It also supports compile-time + meta-programming, i.e. code included between `-{ ... }` is + executed during compilation, and the ASTs it produces are + included in the resulting bytecode. + +* Both parts are packaged as separate LuaRocks, `metalua-parser` and + `metalua-compiler` respectively, so that you can install the former + without the latter. + +* The parser is not a unique object anymore. Instead, + `require "metalua.compiler".new()` returns a different compiler + instance every time it's called. Compiler instances can be reused on + as many source files as wanted, but extending one instance's grammar + doesn't affect other compiler instances. + +* Included standard library has been shed. 
There are too many standard + libs in Lua, and none of them is standard enough, offering + yet-another-one, coupled with a specific compiler can only add to + confusion. + +* Many syntax extensions, which either were arguably more code samples + than actual production-ready tools, or relied too heavily on the + removed runtime standard libraries, have been removed. + +* The remaining libraries and samples are: + + * `metalua.compiler` converts sources into ASTs, bytecode, + functions, and ASTs back into sources. + + * `metalua` compiles and/or executes files from the command line, + can start an interactive REPL session. + + * `metalua.loader` adds a package loader which allows to use modules + written in Metalua, even from a plain Lua program. + + * `metalua.treequery` is an advanced DSL allowing to search ASTs in + a smart way, e.g. "_search `return` statements which return a + `local` variable but aren't in a nested `function`_". + + * `metalua.extension.comprehension` is a language extension which + supports lists by comprehension + (`even = { i for i=1, 100 if i%2==0 }`) and improved loops + (`for i=1, 10 for j=1,10 if i~=j do print(i,j) end`). + + * `metalua.extension.match` is a language extension which offers + Haskell/ML structural pattern matching + (``match AST with `Function{ args, body } -> ... | `Number{ 0 } -> ...end``) + + * **TODO Move basic extensions in a separate module.** + +* To remove the compilation speed penalty associated with + metaprogramming, when environment variable `LUA_MCACHE` or Lua + variable `package.mcache` is defined and LuaFileSystem is available, + the results of Metalua source compilations are cached. Unless the + source file is more recent than the latest cached bytecode file, the + latter is loaded instead of the former. + +* The Luarock install for the full compiler lists dependencies towards + Readline, LuaFileSystem, and Alt-Getopts. 
Those projects are + optional, but having them automatically installed by LuaRocks offers + a better user experience. + +* The license has changed from MIT to double license MIT + EPL. This + has been done in order to provide the IP guarantees expected by the + Eclipse Foundation, to include Metalua in Eclipse's + [Lua Development Tools](http://www.eclipse.org/koneki/ldt/). diff --git a/README-parser.md b/README-parser.md new file mode 100644 index 0000000..d5edacc --- /dev/null +++ b/README-parser.md @@ -0,0 +1,175 @@ +Metalua Parser +============== + +`metalua-parser` is a subset of the Metalua compiler, which turns +valid Lua source files and strings into abstract syntax trees +(AST). This README includes a description of this AST format. People +interested by Lua code analysis and generation are encouraged to +produce and/or consume this format to represent ASTs. + +It has been designed for Lua 5.1. It hasn't been tested against +Lua 5.2, but should be easily ported. + +## Usage + +Module `metalua.compiler` has a `new()` function, which returns a +compiler instance. This instance has a set of methods of the form +`:xxx_to_yyy(input)`, where `xxx` and `yyy` must be one of the +following: + +* `srcfile` the name of a Lua source file; +* `src` a string containing the Lua sources of a list of statements; +* `lexstream` a lexical tokens stream; +* `ast` an abstract syntax tree; +* `bytecode` a chunk of Lua bytecode that can be loaded in a Lua 5.1 + VM (not available if you only installed the parser); +* `function` an executable Lua function. + +Compiling into bytecode or executable functions requires the whole +Metalua compiler, not only the parser. The most frequently used +functions are `:src_to_ast(source_string)` and +`:srcfile_to_ast("path/to/source/file.lua")`. 
+ + mlc = require 'metalua.compiler'.new() + ast = mlc :src_to_ast[[ return 123 ]] + +A compiler instance can be reused as much as you want; it's only +interesting to work with more than one compiler instance when you +start extending their grammars. + +## Abstract Syntax Trees definition + +### Notation + +Trees are written below with some Metalua syntax sugar, which +increases their readability. The backquote symbol introduces a `tag`, +i.e. a string stored in the `"tag"` field of a table: + +* `` `Foo{ 1, 2, 3 }`` is a shortcut for `{tag="Foo", 1, 2, 3}`; +* `` `Foo`` is a shortcut for `{tag="Foo"}`; +* `` `Foo 123`` is a shortcut for `` `Foo{ 123 }``, and therefore + `{tag="Foo", 123 }`; the expression after the tag must be a literal + number or string. + +When using a Metalua interpreter or compiler, the backtick syntax is +supported and can be used directly. Metalua's pretty-printing helpers +also try to use backtick syntax whenever applicable. + +### Tree elements + +Tree elements are mainly categorized into statements `stat`, +expressions `expr` and lists of statements `block`. Auxiliary +definitions include function applications/method invocation `apply`, +which are both valid statements and expressions, and expressions admissible on +the left-hand-side of an assignment statement `lhs`. + + block: { stat* } + + stat: + `Do{ stat* } + | `Set{ {lhs+} {expr+} } -- lhs1, lhs2... = e1, e2... + | `While{ expr block } -- while e do b end + | `Repeat{ block expr } -- repeat b until e + | `If{ (expr block)+ block? } -- if e1 then b1 [elseif e2 then b2] ... [else bn] end + | `Fornum{ ident expr expr expr? block } -- for ident = e, e[, e] do b end + | `Forin{ {ident+} {expr+} block } -- for i1, i2... in e1, e2... do b end + | `Local{ {ident+} {expr+}? } -- local i1, i2... = e1, e2... + | `Localrec{ ident expr } -- only used for 'local function' + | `Goto{ } -- goto str + | `Label{ } -- ::str:: + | `Return{ } -- return e1, e2... 
+ | `Break -- break + | apply + + expr: + `Nil | `Dots | `True | `False + | `Number{ } + | `String{ } + | `Function{ { `Id{ }* `Dots? } block } + | `Table{ ( `Pair{ expr expr } | expr )* } + | `Op{ opid expr expr? } + | `Paren{ expr } -- significant to cut multiple values returns + | apply + | lhs + + apply: + `Call{ expr expr* } + | `Invoke{ expr `String{ } expr* } + + lhs: `Id{ } | `Index{ expr expr } + + opid: 'add' | 'sub' | 'mul' | 'div' + | 'mod' | 'pow' | 'concat'| 'eq' + | 'lt' | 'le' | 'and' | 'or' + | 'not' | 'len' + +### Meta-data (lineinfo) + + +ASTs also embed some metadata, allowing to map them to their source +representation. Those informations are stored in a `"lineinfo"` field +in each tree node, which points to the range of characters in the +source string which represents it, and to the content of any comment +that would appear immediately before or after that node. + +Lineinfo objects have two fields, `"first"` and `"last"`, describing +respectively the beginning and the end of the subtree in the +sources. For instance, the sub-node ``Number{123}` produced by parsing +`[[return 123]]` will have `lineinfo.first` describing offset 8, and +`lineinfo.last` describing offset 10: + + + > mlc = require 'metalua.compiler'.new() + > ast = mlc :src_to_ast "return 123 -- comment" + > print(ast[1][1].lineinfo) + + > + +A lineinfo keeps track of character offsets relative to the beginning +of the source string/file ("K8-10" above), line numbers (L1 above; a +lineinfo spanning on several lines would read something like "L1-10"), +columns i.e. offset within the line ("C8-10" above), and a filename if +available (the "?" mark above indicating that we have no file name, as +the AST comes from a string). 
The final "|C>" indicates that there's a +comment immediately after the node; an initial " mlp.expr.infix:add{ "xor", prec=40, assoc='left', builder=xor_builder } - - Moreover, combinators tend to produce usable error messages when fed - with syntactically incorrect inputs. It matters, because clearly - explaining why an invalid input is invalid is almost as important as - compiling a valid one, for a use=able compiler. - -Yacc-like systems might seem simpler to adopt than combinators, as -long as they're used on extremely simple problems. However, if you -either try to write something non trivial, or to write a simple macro -in a robust way, you'll need to use lots of messy tricks and hacks, -and spend much more time getting them (approximately) right than -that 1/2 hour required to master the regular features of gg. - - -Real meta-programming ---------------------- - -If you plan to go beyond trivial keyword-for-keyword syntax tweaks, -what will limit you is not syntax definition, but the ability to -manipulate source code conveniently: without the proper tools and -abstractions, even the simplest tasks will turn into a dirty hacks -fest, then either into a maintenance nightmare, or simply into -abandonware. Providing an empowering framework so that you don't get -stuck in such predicaments is Metalua's whole purpose. The central -concept is that programs prefer to manipulate code as trees, whereas -most developers prefer ASCII sources, so both representations must be -freely interchangeable. The make-or-break deal is then: - -- To easily let users see sources as trees, as sources, or as - combination thereof, and switch representations seamlessly. - -- To offer the proper libraries that won't force you to reinvent a - square wheel will take care of the most common pitfalls and won't - force you to resort to brittle hacks. - -On the former point, Lisps are at a huge advantage, their user syntax -already being trees. 
But languages with casual syntax can also offer -interchangeable tree/source views; Metalua has some quoting +{ ... } -and anti-quoting -{ ... } operators which let you switch between both -representations at will: internally it works on trees, but you always -have the option to see them as quoted sources. Metalua also supports a -slightly improved syntax for syntax trees, to improve their -readability. - -Library-wise, Metalua offers a set of syntax tree manipulation tools: - -- Structural pattern matching, a feature traditionally found in - compiler-writing specialized languages (and which has nothing to do - with string regular expressions BTW), which lets you express - advanced tree analysis operations in a compact, readable and - efficient way. If you have to work with advanced data structures - and you try it, you'll never go back. - -- The walker library allows you to perform transformations on big - portions of programs. It lets you easily express things like: - "replace all return statements which aren't in a nested function by - error statements", "rename all local variables and their instances - into unique fresh names", "list the variables which escape this - chunk's scope", "insert a type-checking instruction into every - assignment to variable X", etc. Most of non-trivial macros will - require some of those global code transformations, if you really want - them to behave correctly. - -- Macro hygiene, although not perfect yet in Metalua, is required if - you want to make macro writing reasonably usable (and contrary to a - popular belief, renaming local variables into fresh names only - address the easiest part of the hygiene issue; cf. changelog below - for more details). - -- The existing extensions are progressively refactored in more modular - ways, so that their features can be effectively reused in other - extensions. 
- - -Noteworthy changes from 0.4.1 to 0.5 -==================================== - -Simplification of the install and structure: - -- This release is included in Lua for Windows, so now it couldn't get simpler - for MS-Windows users! - -- Metalua is written in pure Lua again, thus making it platform-independant. - No more mandatory C libraries. Pluto interface might be back, as an option, - in a future version, but it's not worth the install trouble involved by - DLL dependencies. - -- Simpler build process, just run make.sh or make.bat depending on your OS. - -- Metalua libraries are now in a separate metalua/* package. This allows to - mix them with other Lua libraries, and to use them from plain Lua programs - if you FIXME - - -Other changes: - -- new option -S in metalua: prints sources re-generated from AST, after macro - expansion. - -- compatible with more Lua VMs: 64 bits numbers, integral numbers, big endians... - -- some new extensions: xloop, xmatch, improved match. - -- ASTs now keep track of the source that generated them (API is not - mature though, it will be changed and broken). - -- improved table printer: support of a plain-Lua mode, alternative indentation - mode for deeply-nested tables. - -- added a generic table serializer, which handles shared and recursive - sub-tables correctly. - -- gg API has been made slightly more flexible, as a first step towards a - comprehensive syntax support for gg grammar definition. Follow the gg-syntax - branch on github for ongoing work. - - -Noteworthy changes from 0.4 to 0.4.1 -==================================== - -- Proper reporting of runtime errors -- Interactive REPL loop -- Support for 64 bits architectures -- Update to Pluto 2.2 and Lua 5.1.3 -- Build for Visual Studio .NET - -Noteworthy changes from 0.3 to 0.4 -================================= - -- A significantly bigger code base, mostly due to more libraries: - about 2.5KLoC for libs, 4KLoC for the compiler. 
However, this remains - tiny in today's desktop computers standards. You don't have to know - all of the system to do useful stuff with it, and since compiled - files are Lua 5.1 compatible, you can keep the "big" system on a - development platform, and keep a lightweight runtime for embedded or - otherwise underpowered targets. - - -- The compiler/interpreter front-end is completely rewritten. The new - frontend program, aptly named 'Metalua', supports proper passing of - arguments to programs, and is generally speaking much more user - friendly than the mlc from the previous version. - - -- Metalua source libraries are searched for in environmemt variable - LUA_MPATH, distinct from LUA_PATH. This way, in an application - that's part Lua part Metalua, you keep a natural access to the - native Lua compiler. - - By convention, Metalua source files should have extension .mlua. By - default, bytecode and plain lua files have higher precedence than - Metalua sources, which lets you easily precompile your libraries. - - -- Compilation of files are separated in different Lua Rings: this - prevents unwanted side-effects when several files are compiled - (This can be turned off, but shouldn't be IMO). - - -- Metalua features are accessible programmatically. Library - 'Metalua.runtime' loads only the libraries necessary to run an - already compiled file; 'Metalua.compile' loads everything useful at - compile-time. - - Transformation functions are available in a library 'mlc' that - contains all meaningful transformation functions in the form - 'mlc.destformat_of_sourceformat()', such as 'mlc.luacfile_of_ast()', - 'mlc.function_of_luastring()' etc. This library has been - significantly completed and rewritten (in Metalua) since v0.3. - - -- Helper libraries have been added. For now they're in the - distribution, at some point they should be luarocked in. 
These - include: - - Lua Rings and Pluto, duct-taped together into Springs, an improved - Rings that lets states exchange arbitrary data instead of just - scalars and strings. Since Pluto requires a (minor) patch to the - VM, it can be disabled. - - Lua bits for bytecode dumping. - - As always, very large amounts of code borrowed from Yueliang. - - As a commodity, I've also packaged Lua sources in. - - -- Extensions to Lua standard libraries: many more features in table - and the baselib, a couple of string features, and a package system - which correctly handles Metalua source files. - - -- Builds on Linux, OSX, Microsoft Visual Studio. Might build on mingw - (not tested recently, patches welcome). It's easily ported to all - systems with a full support for lua, and if possible dynamic - libraries. - - The MS-windows building is based on a dirty .bat script, because - that's pretty much the only thing you're sure to find on a win32 - computer. It uses Microsoft Visual Studio as a compiler (tested with - VC++ 6). - - Notice that parts of the compiler itself are now written in Metalua, - which means that its building now goes through a bootstrapping - stage. - - -- Structural pattern matching improvements: - - now also handles string regular expressions: 'someregexp'/pattern - will match if the tested term is a string accepted by the regexp, - and on success, the list of captures done by the regexp is matched - against pattern. - - Matching of multiple values has been optimized - - the default behavior when no case match is no to raise an error, - it's the most commonly expected case in practice. Trivial to - cancel with a final catch-all pattern. - - generated calls to type() are now hygienic (it's been the cause of - a puzzling bug report; again, hygiene is hard). - - -- AST grammar overhaul: - The whole point of being alpha is to fix APIs with a more relaxed - attitude towards backward compatibility. 
I think and hope it's the - last AST revision, so here is it: - - `Let{...} is now called `Set{...} - (Functional programmers would expect 'Let' to introduce an - immutable binding, and assignment isn't immutable in Lua) - - `Key{ key, value } in table literals is now written `Pair{ key, value } - (it contained a key *and* its associated value; besides, 'Pair' is - consistent with the name of the for-loop iterator) - - `Method{...} is now `Invoke{...} - (because it's a method invocation, not a method declaration) - - `One{...} is now `Paren{...} and is properly documented - (it's the node representing parentheses: it's necessary, since - parentheses are sometimes meaningful in Lua) - - Operator are simplified: `Op{ 'add', +{2}, +{2} } instead of - `Op{ `Add, +{2}, +{2} }. Operator names match the corresponding - metatable entries, without the leading double-underscore. - - The operators which haven't a metatable counterpart are - deprecated: 'ne', 'ge', 'gt'. - - -- Overhaul of the code walking library: - - the API has been simplified: the fancy predicates proved more - cumbersome to use than a bit of pattern matching in the visitors. - - binding identifiers are handled as a distinct AST class - - walk.id is scope-aware, handles free and bound variables in a - sensible way. - - the currified API proved useless and sometimes cumbersome, it's - been removed. - - -- Hygiene: I originally planned to release a full-featured hygienic - macro system with v0.4, but what exists remains a work in - progress. Lua is a Lisp-1, which means unhygienic macros are very - dangerous, and hygiene a la Scheme pretty much limits macro writing - to a term rewriting subset of the language, which would be crippling - to use. - - Note: inside hygiene, i.e. preventing macro code from capturing - variables in user code, is trivial to address through alpha - conversion, it's not the issue. 
The trickier part is outside - hygiene, when user's binders capture globals required by the - macro-generated code. That's the cause of pretty puzzling and hard - to find bugs. And the *really* tricky part, which is still an open - problem in Metalua, is when you have several levels of nesting - between user code and macro code. For now this case has to be - hygienized by hand. - - Note 2: Converge has a pretty powerful approach to hygienic macros - in a Lisp-1 language; for reasons that would be too long to expose - here, I don't think its approach would be the best suited to Metalua. - But I might well be proved wrong eventually. - - Note 3: Redittors must have read that Paul Graham has released Arc, - which is also a Lisp-1 with Common Lisp style macros; I expect this - to create a bit of buzz, out of which might emerge proper solutions - the macro hygiene problem. - - -- No more need to create custom syntax for macros when you don't want - to. Extension 'dollar' will let you declare macros in the dollar - table, as in +{block: function dollar.MYMACRO(a, b, c) ... end}, - and use it as $MYMACRO(1, 2, 3) in your code. - - With this extension, you can write macros without knowing anything - about the Metalua parser. Together with quasi-quotes and automatic - hygiene, this will probably be the closest we can go to "macros for - dummies" without creating an unmaintainable mess generator. - - Besides, it's consistent with my official position that focusing on - superficial syntax issues is counter-productive most of the time :) - - -- Lexers can be switched on the fly. This lets you change the set of - keywords temporarily, with the new gg.with_lexer() combinator. You - can also handle radically different syntaxes in a single file (think - multiple-languages systems such as LuaTeX, or programs+goo as PHP). - - -- Incorporation of the bug fixes reported to the mailing list and on - the blog. 
- - -- New samples and extensions, in various states of completion: - - * lists by comprehension, a la python/haskell. It includes lists - chunking, e.g. mylist[1 ... 3, 5 ... 7] - - * anaphoric macros for 'if' and 'while' statements: with this - extension, the condition of the 'if'/'while' is bound to variable - 'it' in the body; it lets you write things like: - - > while file:read '*l' do print(it) end. - - No runtime overhead when 'it' isn't used in the body. An anaphoric - variable should also be made accessible for functions, to let - easily write anonymous recursive functions. - - * Try ... catch ... finally extension. Syntax is less than ideal, - but the proper way to fix that is to refactor the match extension - to improve code reuse. There would be many other great ways to - leverage a refactored match extension, e.g. destructuring binds or - multiple dispatch methods. To be done in the next version. - - * with ... do extension: it uses try/finally to make sure that - resources will be properly closed. The only constraint on - resources is that they have to support a :close() releasing method. - For instance, he following code guarantees that file1 and file2 - will be closed, even if a return or an error occurs in the body. - - > with file1, file2 = io.open "f1.txt", io.open "f2.txt" do - > contents = file1:read'*a' .. file2:read ;*a' - > end - - * continue statement, logging facilities, ternary "?:" choice - operator, assignments as expressions, and a couple of similarly - tiny syntax sugar extensions. - - -You might expect in next versions -================================= -The next versions of Metalua will provide some of the following -improvements, in no particular order: better error reporting, -especially at runtime (there's a patch I've been too lazy to test -yet), support for 64 bits CPUs, better support for macro hygiene, more -samples and extensions, an adequate test suite, refactored libraries. 
- - -Credits -======= - -I'd like to thank the people who wrote the open source code which -makes Metalua run: the Lua team, the authors of Yueliang, Pluto, Lua -Rings, Bitlib; and the people whose bug reports, patches and -insightful discussions dramatically improved the global design, -including John Belmonte, Vyacheslav Egorov, David Manura, Olivier -Gournet, Eric Raible, Laurence Tratt, Alexander Gladysh, Ryan -Pusztai... diff --git a/README.md b/README.md new file mode 100644 index 0000000..dcd10a3 --- /dev/null +++ b/README.md @@ -0,0 +1,13 @@ +Metalua +======= + +Metalua is a Lua code analysis tool, as well as a compiler for a +superset of Lua 5.1 supporting Compile-Time Meta-Programming. It's +separated into two LuaRocks, `metalua-parser` and +`metalua-compiler`. The documentation of each rock can be found in +`README-parser.md` and `README-compiler.md`. + +All the code in Metalua is released under dual licenses: + +* MIT public license (same as Lua); +* EPL public license (same as Eclipse). diff --git a/checks.lua b/checks.lua new file mode 100644 index 0000000..c0c99ac --- /dev/null +++ b/checks.lua @@ -0,0 +1,59 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +-- Alternative implementation of checks() in Lua. 
Slower than +-- the C counterpart, but no compilation/porting concerns. + +checkers = { } + +local function check_one(expected, val) + if type(val)==expected then return true end + local mt = getmetatable(val) + if mt and mt.__type==expected then return true end + local f = checkers[expected] + if f and f(val) then return true end + return false +end + +local function check_many(name, expected, val) + if expected=='?' then return true + elseif expected=='!' then return (val~=nil) + elseif type(expected) ~= 'string' then + error 'strings expected by checks()' + elseif val==nil and expected :sub(1,1) == '?' then return true end + for one in expected :gmatch "[^|?]+" do + if check_one(one, val) then return true end + end + return false +end + +function checks(...) + for i, arg in ipairs{...} do + local name, val = debug.getlocal(2, i) + local success = check_many(name, arg, val) + if not success then + local fname = debug.getinfo(2, 'n').name + local fmt = "bad argument #%d to '%s' (%s expected, got %s)" + local msg = string.format(fmt, i, fname or "?", arg, type(val)) + error(msg, 3) + end + end +end + +return checks diff --git a/junk/README b/junk/README deleted file mode 100644 index c818c4b..0000000 --- a/junk/README +++ /dev/null @@ -1 +0,0 @@ -This directory is intended to contain unfinished, random, poor or temporary stuff that shouldn't make it to a distro. \ No newline at end of file diff --git a/junk/combine.c b/junk/combine.c deleted file mode 100644 index aa7a94d..0000000 --- a/junk/combine.c +++ /dev/null @@ -1,51 +0,0 @@ -/* This should combine several functions into one, when they're already - * compiled into functions. Useful when we don't have their AST, e.g. - * to link several precompiled chunks into one. - * - * It currently doesn't work; meanwhile, one can use the original - * 'luac' executable, although it doesn't handle argument passing through - * "..." correctly. 
- */ - -#include -#include -#include -#include -#include -#include -#include - -static int lua_combine( lua_State* L) { - int n = lua_gettop( L); /* Number of functions to combine */ - if( 1 == n) { - return 1; /* Only one function, nothing to combine */ - } else { - int i, pc = 3*n + 1; - Proto* f = luaF_newproto( L); - setptvalue2s( L,L->top,f); - incr_top( L); - f->source = luaS_newliteral( L,"=(combiner)"); - f->maxstacksize = 2; - f->is_vararg = VARARG_ISVARARG; - f->code = luaM_newvector(L, pc, Instruction); - f->sizecode = pc; - f->p = luaM_newvector( L, n, Proto*); - f->sizep = n; - for( i = pc = 0; i < n; i ++) { - int proto_idx = i-n-1; - Proto *p = clvalue( L->top + proto_idx)->l.p; - f->p[i] = p; - f->code[pc++] = CREATE_ABx( OP_CLOSURE, 0, i); - f->code[pc++] = CREATE_ABx( OP_VARARG, 1, 0); - f->code[pc++] = CREATE_ABC( OP_CALL, 0, 0, 1); - } - f->code[pc++] = CREATE_ABC( OP_RETURN, 0, 1, 0); - return 1; - } -} - -int luaopen_combine( lua_State *L) { - lua_pushcfunction( L, lua_combine); - lua_setglobal( L, "combine"); - return 0; -} diff --git a/junk/dynamatch.mlua b/junk/dynamatch.mlua deleted file mode 100644 index 611ce24..0000000 --- a/junk/dynamatch.mlua +++ /dev/null @@ -1,29 +0,0 @@ ---x = dynamatch() ---x <| foo | bar -> do toto end ---table.insert(x.cases, |$| match $ with foo|bar -> toto end ) - - -local match_builder = mlp.stat:get "match" - -function dynacase_builder (d, s) - local v = mlp.gensym() - local m = match_builder{ v, false, { s[1], s[2], s[3] } } - local c = `Function{ {v}, {m} } - return `Call{ `Index{ d, "extend" }, c } -end - ---fixme: limiter la precedence du expr de droite -mlp.expr.suffix:add{ - name = "dynamatch extension", prec=30, - "<|", - gg.list{ name = "patterns", - primary = mlp.expr, - separators = "|", - terminators = { "->", "if" } }, - gg.onkeyword{ "if", mlp.expr }, - "->", - gg.multisequence{ - { "do", mlp.block, "end", builder = |x| x[1] }, - default = { mlp.expr, builder = |x| { `Return{ x[1] } } } }, - 
builder = |x| dyna_builder (x[1], x[3]) } - diff --git a/junk/hygienic.lua b/junk/hygienic.lua deleted file mode 100755 index ea4adeb..0000000 --- a/junk/hygienic.lua +++ /dev/null @@ -1,279 +0,0 @@ ----------------------------------------------------------------------- --- Metalua: $Id$ --- --- Summary: Hygienic macro facility for Metalua --- ----------------------------------------------------------------------- --- --- Copyright (c) 2006, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. --- --------------------------------------------------------------------------------- --- --- ============= --- W A R N I N G --- ============= --- --- THIS IS AN OLD NAIVE IMPLEMENTATION. IT'S PARATIAL (NO HYGIENE WRT OUTSIDE) --- AND WRITTEN FROM SCRATCH WITH PATTERN MATCHING. MUST BE DONE WITH A WALKER. --- --- Traditional macros carry a well-known pitfall, called variable capture: --- when pasting a piece of source code A into another code B, if B bonds some --- variables used by A, then the meaning of A is modified in a way probably --- not intended by the user. --- --- Example: --- A = +{ n = 5 } --- B = +{ local n=3; -{ A } } --- --- In this example, [n] in [A] will be captured by the local variable declared --- by [B], and this is probably a bug. --- --- Notice that this also exists in C. Typical example: --- --- #define swap (type, a, b) do { type tmp=a; a=b; b=tmp } while(0) --- void f() { --- int tmp=1, a=2; --- swap (int, tmp, a); // won't work, [tmp] is captured in the macro --- } --- --- We can fix this by making sure that all local variables and parameters --- created by [B] have fresh names. [mlp.gensym()] produces guaranteed-to-be-unique --- variable names; we use it to replace all local var names declarations and --- occurences in [B] by such fresh names. --- --- Such macros which are guaranteed not to capture any variable are called --- hygienic macros. 
By extension, an AST guaranteed not to contain capturing --- variables is called an hygienic AST. --- --- We implement here some functions which make sure that an AST is hygienic: --- --- - [hygienize_stat (ast)] for statement AST; --- - [hygienize_stat (ast)] for statement block AST; --- - [hygienize_expr (ast)] for expression AST; --- --- This sample deconstructs AST by structural pattern matching, which is --- supported by Metalua extension "match.lua" --- --------------------------------------------------------------------------------- - --{ extension "match" } - -require "std" - -local clone_ctx = std.shallow_copy - --------------------------------------------------------------------------------- --- Tag tables: these allow [hygienize] to decide whether an AST is --- an expression, a statement, or something which isn't changed by --- alpha renaming. --------------------------------------------------------------------------------- -local stat_tags = { - Do = true, Let = true, - While = true, Repeat = true, - If = true, Fornum = true, - Forin = true, Local = true, - Localrec = true, Return = true } - -local expr_tags = { - Function = true, Table = true, - Op = true, Call = true, - Method = true, Index = true } - -local neutral_tags = { - String = true, Number = true, - True = true, False = true, - Dots = true, Break = true, - Id = true } - --------------------------------------------------------------------------------- --- Choose the relevant [hygienize_xxx()] function according to the AST's tag --- and the tables above. 
--------------------------------------------------------------------------------- -function hygienize (ast) - if not ast.tag then hygienize_block (ast) - elseif neutral_tags[ast.tag] then -- pass - elseif stat_tags[ast.tag] then hygienize_stat (ast) - elseif expr_tags[ast.tag] then hygienize_expr (ast) - else error "Unrecognized AST" end - return ast -end - -if mlp then - -- Add hygienic parsers for quotes - mlp.hexpr = hygienize `o` mlp.expr - mlp.hstat = hygienize `o` mlp.stat - mlp.hblock = hygienize `o` mlp.block -end - --------------------------------------------------------------------------------- --- Make a statement AST hygienic. The optional [ctx] parameter is a --- [old_name -> new_name] map, which holds variable name substitutions --- to perform. --------------------------------------------------------------------------------- -function hygienize_stat (ast, ctx) - if not ctx then ctx = { } end - match ast with - | { ... } if not ast.tag -> hygienize_block (ast, ctx) - | `Do{ ... } -> hygienize_block (ast, clone_ctx (ctx)) - - | `Let{ vars, vals } -> - hygienize_expr_list (vars, ctx) - hygienize_expr_list (vals, ctx) - - | `While{ cond, block } -> - hygienize_expr (cond, ctx) - -- use a clone of [ctx], since the block has a separate scope - hygienize_block (ast, clone_ctx (ctx)) - - | `Repeat{ block, cond } -> - -- use a clone of [ctx], since the block has a separate scope. - -- Notice that the condition in [repeat ... until] is evaluated - -- inside the block's scope, i.e. with [inner_ctx] rather than [ctx]. - local inner_ctx = clone_ctx (ctx) - hygienize_block (ast, inner_ctx) - hygienize (cond, inner_ctx) - - | `If{ ... } -> - for i=1, #ast-1, 2 do - hygienize_expr (ast[i], ctx) -- condtion - -- each block has its own scope - hygienize_block (ast[i+1], clone_ctx (ctx)) -- conditional block - end - if #ast % 2 == 1 then - hygienize_block (ast[#ast], clone_ctx (ctx)) -- else block - end - - | `Fornum{ var, ... 
} -> - hygienize_expr (ast[i], ctx, 2, #ast-1) -- start, finish, step? exprs - local inner_ctx = clone_ctx (ctx) - alpha_rename (var, inner_ctx) -- rename local var [var] in [inner_ctx] - hygienize_block (ast[#ast], inner_ctx) - - | `Forin{ vars, vals, block } -> - hygienize_expr_list (vals, ctx) - local inner_ctx = clone_ctx (ctx) - alpha_rename_list (vars, inner_ctx) -- rename local vars [vars] in [inner_ctx] - hygienize_block (block, inner_ctx) - - | `Local{ vars, vals } -> - -- locals only enter in scope after their values are computed - -- --> parse values first, then rename vars - hygienize_expr_list (vals, ctx) - alpha_rename_list (vars, ctx) - - | `Localrec{ vars, vals } -> - -- As opposed to [`Local], vars are in scope during their values' - -- computation --> rename before parsing values. - alpha_rename_list (vars, ctx) - hygienize_expr_list (vals, ctx) - - | `Call{ ... } | `Method{ ... } -> - -- these are actually expr, delegate to [hygienize_expr] - hygienize_expr (ast, ctx) - - | `Return{ ... } -> hygienize_expr_list (ast, ctx) - | `Break -> - | _ -> error ("Unknown statement "..ast.tag) - end -end - - --------------------------------------------------------------------------------- --- Make an expression AST hygienic. The optional [ctx] parameter is a --- [old_name -> new_name] map, which holds variable name substitutions --- to perform. --------------------------------------------------------------------------------- -function hygienize_expr (ast, ctx) - if not ctx then ctx = { } end - match ast with - | `String{ _ } | `Number{ _ } | `True | `False | `Dots -> -- nothing - - | `Function{ params, block } -> - local inner_ctx = clone_ctx (ctx) - alpha_rename_list (params, inner_ctx) - hygienize_block (block, inner_ctx) - - | `Table{ ... 
} -> - for _, x in ipairs (ast) do - match x with - | `Key{ key, val } -> - hygienize_expr (key, ctx) - hygienize_expr (val, ctx) - | _ -> hygienize (x, ctx) - end - end - - | `Id{ x } -> - -- Check for substitutions to apply: - local y = ctx[x]; if y then ast[1] = y end - - | `Op{ op, ... } -> - hygienize_expr_list (ast, ctx, 2, #ast) - - -- Just dispatch to sub-expressions: - | `Call{ func, ... } - | `Method{ obj, `String{ name }, ... } - | `Index{ table, key } -> - hygienize_expr_list (ast, ctx) - | _ -> error ("Unknown expression "..ast.tag) - end -end - --------------------------------------------------------------------------------- --- Make an statements block AST hygienic. The optional [ctx] parameter is a --- [old_name -> new_name] map, which holds variable name substitutions --- to perform. --------------------------------------------------------------------------------- -function hygienize_block (ast, ctx) - if not ctx then ctx = { } end - table.iter ((|x| hygienize(x, ctx)), ast) --- for i = 1, #ast do --- hygienize_stat (ast[i], ctx) --- end -end - --------------------------------------------------------------------------------- --- Makes a shallow copy of a table. Used to make a copy of [ctx] substitution --- tables, when entering a new scope. --------------------------------------------------------------------------------- ---[[ -function clone_ctx (ctx) - local r = { } - for k, v in pairs (ctx) do r[k] = v end - return r -end -]] - --------------------------------------------------------------------------------- --- Make every expression from index [start] to [finish], in list --- [ast], hygienic. The optional [ctx] parameter is a [old_name -> --- new_name] map, which holds variable name substitutions to perform. --- [start] defaults to 1, [finish] defaults to the list's size. 
--------------------------------------------------------------------------------- -function hygienize_expr_list (ast, ctx, start, finish) - for i = start or 1, finish or #ast do - hygienize_expr (ast[i], ctx) - end -end - --------------------------------------------------------------------------------- --- Replace the identifier [var]'s name with a fresh one generated by --- [mlp.gensym()], and store the new association in [ctx], so that the --- calling function will be able to substitute identifier occurences with --- its new name. --------------------------------------------------------------------------------- -function alpha_rename (var, ctx) - assert (var.tag == "Id") - ctx[var[1]] = mlp.gensym()[1] - var[1] = ctx[var[1]] -end - --------------------------------------------------------------------------------- --- runs [alpha_rename] on a list of identifiers. --------------------------------------------------------------------------------- -function alpha_rename_list (vars, ctx) - for _, v in ipairs(vars) do alpha_rename (v, ctx) end -end diff --git a/junk/hygienic2.lua b/junk/hygienic2.lua deleted file mode 100644 index d0ca8c3..0000000 --- a/junk/hygienic2.lua +++ /dev/null @@ -1,101 +0,0 @@ --------------------------------------------------------------------------------- --- --- (c) Fabien Fleutot 2007, published under the MIT license. --- --- --- API: --- ---- --- * freevars.block(ast) --- * freevars.expr(ast) --- * freevars.stat(ast) --- --------------------------------------------------------------------------------- - -require 'std' -require 'walk' -require 'freevars' - --{ extension 'match' } - --------------------------------------------------------------------------------- --- Return the string->boolean hash table of the names of all free variables --- in 'term'. 'kind' is the name of an entry in module 'walk', presumably --- one of 'expr', 'stat' or 'block'. 
--------------------------------------------------------------------------------- -local function alpha (kind, term) - local cfg = { expr = { }, stat = { }, block = { } } - - ----------------------------------------------------------------------------- - -- Monkey-patch the scope add method, so that it associates a unique name - -- to bound vars. - ----------------------------------------------------------------------------- - local scope = scope:new() - function scope:add(vars) - for v in values(vars) do self.current[v] = mlp.gensym(v) end - end - - ----------------------------------------------------------------------------- - -- Check identifiers; add functions parameters to scope - ----------------------------------------------------------------------------- - function cfg.expr.down(x) - match x with - | `Splice{...} -> return 'break' -- don't touch user parts - | `Id{ name } -> - local alpha = scope.current[name] - if alpha then x[1] = alpha end - | `Function{ params, _ } -> scope:push(); scope:add (params) - | _ -> -- pass - end - end - - ----------------------------------------------------------------------------- - -- Close the function scope opened by 'down()' - ----------------------------------------------------------------------------- - function cfg.expr.up(x) - match x with `Function{...} -> scope:pop() | _ -> end - end - - ----------------------------------------------------------------------------- - -- Create a new scope and register loop variable[s] in it - ----------------------------------------------------------------------------- - function cfg.stat.down(x) - match x with - | `Splice{...} -> return 'break' - | `Forin{ vars, ... } -> scope:push(); scope:add(vars) - | `Fornum{ var, ... } -> scope:push(); scope:add{var} - | `Localrec{ vars, ... 
} -> scope:add(vars) - | `Repeat{ block, cond } -> -- 'cond' is in the scope of 'block' - scope:push() - for s in values (block) do walk.stat(cfg)(s) end -- no new scope - walk.expr(cfg)(cond) - scope:pop() - return 'break' -- No automatic walking of subparts - | _ -> -- pass - end - end - - ----------------------------------------------------------------------------- - -- Close the scopes opened by 'up()' - ----------------------------------------------------------------------------- - function cfg.stat.up(x) - match x with - | `Forin{ ... } | `Fornum{ ... } -> scope:pop() -- `Repeat has no up(). - | `Local{ vars, ... } -> scope:add(vars) - | _ -> -- pass - end - end - - ----------------------------------------------------------------------------- - -- Create a separate scope for each block - ----------------------------------------------------------------------------- - function cfg.block.down() scope:push() end - function cfg.block.up() scope:pop() end - - walk[kind](cfg)(term) - return freevars -end - --------------------------------------------------------------------------------- --- A wee bit of metatable hackery. Just couldn't resist, sorry. --------------------------------------------------------------------------------- -freevars = setmetatable ({ scope=scope }, { __index = |_, k| |t| fv(k, t) }) diff --git a/junk/maybe.mlua b/junk/maybe.mlua deleted file mode 100644 index 1b29ae1..0000000 --- a/junk/maybe.mlua +++ /dev/null @@ -1,33 +0,0 @@ -mlp.lexer:add{ '?.', '?(', '?[' } - -function maybe_field_builder(e, f) - f = f[1] - local v = mlp.gensym() - local block = - +{block: local -{v} = -{e}; if not -{v}[-{f}] then -{v}[-{f}] = { } end} - return `Stat{ block, v } -end - -function maybe_dot_builder(e, f) - return maybe_field_builder(e, {mlp.id2string(f[1])}) -end - --- NON, ca gere pas les multireturns. Le plus simple c'est d'avoir un support runtime. 
- -function maybe_call_builder(e, args) - args = args[1] - local v, w = mlp.gensym(), mlp.gensym() - local block = - +{block: local -{v}, -{w} = -{e}; if -{v} then (-{w})-{`Call(v, unpack(args))} end} - return `Stat{ block, w } -end - -function maybe_invoke_builder(e, s) - local name, args = unpack(s) - local v, w = mlp.gensym(), mlp.gensym() - local block = - +{block: local -{v}, -{w} = -{e}; if -{v} then (-{w})-{`Invoke(v, unpack(args))} end} - return `Stat{ block, w } -end - -mlp.expr.suffix:add{ '?.', mlp.id, builder = |e, f| \ No newline at end of file diff --git a/junk/notes.txt b/junk/notes.txt deleted file mode 100755 index 053de91..0000000 --- a/junk/notes.txt +++ /dev/null @@ -1,585 +0,0 @@ -=== Random personal notes === - -==================================================================== -This is my persistent *scratch*. There are random notes, in random -languages, unstructured, out of date, generally unexploitable. Don't -expect anything here to make sense. -==================================================================== - -Readme fraft -============ -This distribution of metalua tries to favor ease of install over -saving a couple of kilobytes; therefore it directly embeds pieces of -useful other free software rather than letting users fetch and -configure them, and it installs separately from a regular lua install, -instead of messing it up. - -Open source code used by metalua - - -Metalua current sources include (possibly slightly modified versions -of) these open source projects: - -- Lua, of course. - -- Yueliang, a Lua compiler in written in Lua: this is - the base of metalua's bytecode dumper. - -- Rings, a part of the Kelper project, which lets handle multiple Lua - states from within Lua. - -- Editline, an interactive command line editor, very similar to GNU - readline but with a more permissive licence. 
- -- bitlib for bitwise manipulations (especially useful for bytecode - dumping) - -The mlc module --------------- -This module contains all of the compilation stuff. The version -available after compilation is heavily based on the pattern matching -extension, so it isn't used in the code required for bootstrapping -(which has to be in lua inter metalua. That is, then, Lua) - -Libraries ---------- -The mechanism of metalua libraries is exactly the same as Lua's, -except that the environment variable names ahve been changed to allow -peaceful cohabitation with Lua. - -Etancheite: -=========== -- shell interactif: - * separer les niveaux - * permettre de monter/descendre d'un niveau avec des commandes - dediees: "+:" et "-:" - -Il faut faire la part entre la synthese de l'AST et l'evaluation. La -synthese de l'AST est faite en amont de mlc.splice(). Apparemment, le -lexer est commun a tout le monde... et mlp aussi. - -Hygiene: -======== -les quotes peuvent etre hygieniques (HQQ) ou pas (QQ). les QQ sont -normales, ne font rien; ca permet de capturer des variables entre -morceaux. - -Les HQQ sont attachees a un contexte, dans lequel seront reversees -leurs variables libres. +{:hexpr(CTX): foo } va alpha renommer toutes -les variables libres de +{foo} et les stocker dans CTX. - -Autre possibilite syntaxique: une +{hexpr: foo } retourne +{foo} et -son contexte. Le contexte permet de decider comment fusionner deux -AST. Il ne se resume pas a une substitution - -->Laurie: - -Your approach is to tell the user that an AST has a sense by itself: -it's an autonomous piece of data that won't interfere with its -environment. Then you re-introduce mechanisms to get some dangling -bits back, so that you can wire the various bits (QQ and splices) -together as you wish. - -Now, the point from which I'll draw my PoV: an AST is only hygienic -relative to a context. 
Through gensym() we're able to craft ASTs that -are hygienic in all normal contexts, but the notion of contexts -continues to exist. In particular, as soon as we want to connect bits -of AST in interesting ways, we start to need a finer-grained control -of contexts. - -You offer, with $c{ } ${ } and &, ways to poke holes into contexts, -but since you try pretend to the user that there's no context, the -user is screwed when he does want to mess with it. He has to guess how -those hidden contexts work, and find ways to use the tools mentionned -above so that they have the intended effect on the stealth context. - -That's a good approach when there's nothing clever to do with -contexts, and it falls down when users get out of the main use -cases. Depending on how rare it is to need such conttext hacking -capabilities, your approach might or might not be practical. - -The metalua-consistent approach is the opposite one: if there's such a -thing as contexts, and it sometimes makes sense for (advanced) users -to fiddle with them, then there must be first class AST context -objects. If it can be optionally hidden in most common cases, great, -but users aren't supposed to ignore their existence. - -Therefore, whenever you combine two pieces of AST, you specify how -their context must be merged. The 2 most common defaults would be -"don't touch anything" (non-hygienic macros) and "make sure there's no -interference" (full hygiene, no capture). - -In the example we just discussed, the problem is that we have 3 AST -(around, inside and usercode) and we want to splice them in a fancy -way: there's only one capturing variable between around and inside, -and it must not capture anything in usercode. You hack your way around -to build a protective shell on usercode, then toss it with the other -ASTs. I'd rather write that around and inside share the same context, -and usercode has its own. Then when combining all those, the -hygienizer will know what to do. 
if I wanted to combine around's and -outside's contexts so that they'd only share "q_var", I should be able -to express that as well. - -Bottom line is: I'd rather have 1st class context than implicit ones, -which will be a hassle to interact with when I have to. KISS -engineering at its best :) - - - -Restent a faire: -================ -- reflechir a l'addition automatique des mots-clef par les parsers qui - vont bien. Peut-etre qu'ils sont attaches optionnellement a un lexer, - et que cet attachement est transmis par defaut qd on insere un - parser ds un autre -- notation pour la generation auto de sous-tables -- verifier le runtime error handling patch d'EVE -- anaphoric macros should bind 'it' to a function when it appears directly in - it. it would allow anonymous recursive functions. However, anaphoric - structures tend to capture each other very fast; maybe Arc has insightful - ideas about this? (e.g. different captured names for function and other - structs) - - -Bugs a verifier: -================ -- reentrance de in_a_quote dans mlp_meta.lua -- refuser les flags autres qu'expr dans les splices in_a_quote - -il faudra encore fournir une boucle interactive, celle de lua n'est -pas patchable. Idem pour le compilo. - -metalua compiler/interpreter short usage manual -=============================================== - -Allows to compile and/or run metalua programs, and to use interactive -loop. - ---compile ---run ---interactive ---output ---load-library [@metalevel] ---print-ast - -By default, parameters are passed to all running chunks. However, if -there is are parameters and no running chunk, the first parameter is -taken as the running chunk. - -metalua --compile foo.mlua - -if there is an --output, all run and compiled files will be saved in -it. - -- compile all files. 
-- displays ASTs depending on --print-ast -- run all files if --run -- run loop after compile and run if -i or if no file -- if no -a and no -o, -x is implied -- if -x and no file, first param is a file - -2nd attempt -=========== - -Compile and/or execute metalua programs. Parameters passed to the -compiler should be prefixed with an option flag, hinting what must be -done with them: take tham as file names to compile, as library names -to load, as parameters passed to the running program... When option -flags lack, metalua tries to adopt a "Do What I Mean" approach: - -- if no code (no library, no literal expression and no file) is - specified, the first flag-less parameter is taken as a file name to - load. - -- if no code and no parameter is passed, an interactive loop is - started. - -- if a target file is specified with --output, the program is not - executed by default, unless a --run flag forces it to. Conversely, - if no --output target is specified, the code is run unless ++run - forbids it. - -When does it compile, when does it execute? -------------------------------------------- -The result of the compilation is saved if there is a --output -specifying a destination file. If not, the result is run. If both ---output and --run are specified, the result is both saved and run. If -the compilation must be saved, it is mandatory to have at least one -file or library. - -How to load chunks to compile/run ---------------------------------- -Files can be passed, with their relative paths, with --file. Libraries -can be loaded, from standard paths, with --library. Litteral blocks of -code can be passed with --literal. - -When does it launch an interactive loop? ----------------------------------------- -When --interactive is specified, or when no chunk is loaded. - - - - -Macro hygiene -============= - -alpha should go at the top of the chunk *where the quote is used*, -which is generally not the same as where it's defined. 
Ideally, it -should go where the extension using the quote is called. So what I -really need is an improved 'extension' statement, which handles quotes -anhoring. - -A serious problem is that most quotes are inside functions and -therefore not always evaluated in the extension() call. - -Let's consider two instants: -- when the QQ is formally created for compilation (1) -- when it's returned by a function, presumably to be spliced (2) - - - If alpha-conv -happens at QQ creation, then I lose the - - -So, alpha -conversion must happen at CT - - -Extension -========= -The current extension() function automatically loads a runtime, even -when empty. More control should be given. Both RT and CT parts should -go in the same directory. Finally, RT should probably handle macro -hygiene in a standardized way. - -==> extension() should be like a require(), except that it: -- prepends 'extension.' to the module name -- returns nil when require() returns true - -From there, macro alphas could be handled as follows: -- create a common alpha at the opening of the extension. As an empty list. -- push in in H() so that it'll be shared by all hygienizations -- return it if there' no runtime, or return it with a require() - statement for the RT lib. - -Shipping 0.4 -============ -- remove autotable: I don't use it and it isn't polished enough to be - useful yet. - -- remove machine and fsm_test; or at least, put it in samples - -- remove mandatory platform from makefile call - -- H loads extension/H-runtime.mlua? - - - -README.TXT -========== -For installation matters, cf. INSTALL.TXT - -Metalua 0.4 -=========== -Metalua is a static metaprogramming system for Lua: a set of tools -that let you alter the compilation process in arbitrary, powerful and -maintainable ways. For the potential first-time users of such a -system, a descripition of these tools, as implemented by metalua, -follows. 
- -Dynamic Parsers ---------------- -One of these tools is dynamic parser: a source file can change the -syntax recognized by the parser while it's being parsed. Taken alone, -this feature lets you make superficial syntax tweaks on the -language. The parser is based on a parser combinator called 'gg'. You -should know the half dozen functions in gg API to do advanced things, -but it means you can use and define functions that transform parsers: - -- There are a couple of very simple combinators like gg.list, - gg.sequence, qq.multisequence, gg.optkeyword etc. that offer a level - of expressiveness comparable to Yacc-like parsers. For instance, if - mlp.expr parses Lua expressions, gg.list{ mlp.expr } creates a - parser which handles lists of Lua expressions. - -- Since you can create all the combinators you can think of, there - also are combinators specialized for typical language tasks. In - Yacc-like systems, the language definition quickly becomes - unreadable, because all non-native features have to be encoded in - clumsy and brittle ways; so if your parser won't natively let you - specify infix operator precedence and associativity easily, tough - luck for you and your code maintainers. With combinators, this is - abstracted away in a regular function, so you just write: - - > mlp.expr.infix:add{ "xor", prec=40, assoc='left', builder=xor_builder } - - Moreover, combinators tend to produce usable error messages when fed - with syntactically incorrect inputs. It matters, because clearly - explaining why an invalid input is invalid is almost as important as - compiling a valid one, for a use=able compiler. - -Yacc-like systems might seem simpler to adopt than combinators, as -long as they're used on extremely simple problems. 
However, if if you -either try to write something non trivial, or to write a simple macro -in a robust way, you'll need to use lots of messy tricks and hacks, -and spend much more time getting them (seemingly) ritght than that 1/2 -hour required to master most of gg. - - -Real meta-programming ---------------------- -If you plan to go beyond trivial keyword-for-keyword syntax tweaks, -what will limit you is the ability to manipulate source code -conveniently: without the proper tools and abstractions, even the -simplest tasks will turn into a dirty hacks fest, then either into a -nightmare, or most often into abandonware. Providing an empowering -framework is metalua's whole purpose. The core principle is that -programs prefer to manipulate code as trees (whereas most developers -prefer ASCII sources). The make-or-break deal is then: - -- To easily let users see sources as trees, as sources, or as - combination thereof, and switch representations seamlessly. - -- To offer the proper libraries, that won't force you to reinvent a - square wheel, will take care of the most common pitfalls, won't - force you to resort to brittle hacks. - -On the former point, Lisps are at a huge advantage, their user syntax -already being trees. But languages with casual syntax can also offer -interchangeable tree/source views; metalua has some quoting +{ ... } -and anti-quoting -{ ... } operators which let you switch between both -representations at will: internally it works on trees, but you always -have the option to see them as quoted sources. Metalua also supports a -slightly improved syntax for syntax trees, to improve their -readability. 
- -Library-wise, metalua offers a set of syntax tree manipulation tools: - -- Structural pattern matching, a feature traditionally found in - compiler-writing specialized languages (and which has nothing to do - with string regular expressions BTW), which lets you express - advanced tree analysis operations in a compact, readable and - efficient way. If you regularly have to work with advanced data - structures and you try it, you'll never go back. - -- The walker library allows you to perform transformations on big - portions of programs. It lets you easily express things like: - "replace all return statements which aren't in a nested function by - error statements", "rename all local variables and their instances - into unique fresh names", "list the variables which escape this - chunk's scope", "insert a type-checking instruction into every - assignments to variable X", etc. You can't write many non-trivial - macros without needing to do some of those global code - transformations. - -- Macro hygiene, although not perfect yet in metalua, is required if - you want to make macro writing reasonably usable (and contrary to a - popular belief, renaming local variables into fresh names only - address the easiest part of the hygiene issue; cf. changelog below - for more details). - -- The existing extensions are progressively refactored in more modular - ways, so that their features can be effectively reused in other - extensions. - - -Notworthy changes since 0.3 -=========================== - -- A significantly bigger code base, mostly due to more libraries: - about 2.5KLoC for libs, 4KLoC for the compiler. However, this remains - tiny in today's desktop computers standards. You don't have to know - all of the system to do useful stuff with it, and since compiled - files are Lua 5.1 compatible, you can keep the "big" system on a - development platform, and keep a lightweight runtime for embedded or - otherwise underpowered targets. 
- - -- The compiler/interpreter front-end is completely rewritten. The new - program, aptly named 'metalua', supports proper passing of arguments - to programs, and is generally speaking much more user friendly than - the mlc from the previous version. - - -- Metalua source libraries are looked for in environmemt variable - LUA_MPATH, distinct from LUA_PATH. This way, in an application - that's part Lua part Metalua, you keep a natural access to the - native Lua compiler. - - By convention, metalua source files should have extension .mlua. By - default, bytecode and plain lua files are preferred to metalua - sources, which lets you easily precompile your libraries. - - -- Compilation of files are separated in different Lua Rings: this - prevents unwanted side-effects when several files are compiled - (This can be turned off, but shouldn't be IMO). - - -- Metalua features are accessible programmatically. Library - 'metalua.runtime' loads only the libraries necessary to run an - already compiled file; 'metalua.compile' loads everything useful at - compile-time. - - Transformation functions are available in a library 'mlc' that - contains all meaningful transformation functions in the form - 'mlc.destformat_of_sourceformat()', such as 'mlc.luacfile_of_ast()', - 'mlc.function_of_luastring()' etc. This library has been - significantly completed and rewritten (in metalua) since v0.3. - - -- Helper libraries have been added. For now they're in the - distribution, at some point they should be luarocked in. These - include: - - Lua Rings and Pluto, duct-taped together into Springs, an improved - Rings that lets states exchange arbitrary data instead of just - scalars and strings. - - Lua bits for bytecode dumping. - - As always, very large amounts of code borrowed from Yueliang. - - As a commodity, I've also packaged Lua sources in. - - -- Builds on Linux, OSX, Microsoft Visual Studio. Might build on mingw - (not tested recently). 
It's easily ported to all systems with a full - support for lua. - - The MS-windows building is hackish: it's driven by a batch script, - and Pluto can't compile as a win32 DLL, so it's linked in the Lua - VM. If you want to run your own VM, either link pluto in statically, - or disabled separate compilation by setting environment variable - LUA_MFAST at true. In the later case, expect puzzling behaviors when - you load several sources containing compile-time code (==> - precompile everything). - - Notice that bits of the compiler itself are now written in metalua, - which means that its building now goes through a bootstrapping - stage. - - -- Structural pattern matching: - - now also handles string regular expressions: 'someregexp'/pattern - will match if the tested term is a string accepted by the regexp, - and on success, the list of captures done by the regexp is matched - against pattern. - - Matching of multiple values has been optimized - - the default behavior when no case match is no to raise an error, - it's the most commonly expected case in practice. Trivial to - cancel with a final catch-all pattern. - - generated calls to type() are now hygienic (it's been the cause of - a puzzling bug report; again, hygiene is hard). - - -- AST grammar overhaul: - The whole point of being alpha is to fix APIs with a more relaxed - attitude towards backward compatibility. 
I think and hope it's the - last AST revision, so here is it: - - `Let{...} is now called `Set{...} - (Functional programmers would expect 'Let' to introduce an - immutable binding, and assignment isn't immutable in Lua) - - `Key{ key, value } in table literals is now written `Pair{ key, value } - (it contained a key *and* its associated value; besides, 'Pair' is - consistent with the name of the for-loop iterator) - - `Method{...} is now `Invoke{...} - (because it's a method invocation, not a method declaration) - - `One{...} is now `Paren{...} and is properly documented - (it's the node representing parentheses: it's necessary, since - parentheses are sometimes meaningful in Lua) - - Operator are simplified: `Op{ 'add', +{2}, +{2} } instead of - `Op{ `Add, +{2}, +{2} }. Operator names match the corresponding - metatable entries, without the leading double-underscore. - - The operators which haven't a metatable counterpart are - deprecated: 'ne', 'ge', 'gt'. - - -- Overhaul of the code walking library: - - the API has been simplified: the fancy predicates proved more - cumbersome to use than a bit pattern matching in the visitors - - binding identifiers are handled as a distinct AST class - - walk.id is scope-aware, handles free and bound variables in a - sensible way. - - the currified API proved useless and sometimes cumbersome, it's - been removed. - - -- Hygiene: I originally planned to release a full-featured hygienic - macro system with v0.4, but what exists remains a work in - progress. Lua is a Lisp-1, which means unhygienic macros are very - dangerous, and hygiene a la Scheme pretty much limits macro writing - to a term rewriting subset of the language, which is crippling to - use. - - Note: inside hygiene, i.e. local variables created by the macro - which might capture user's variable instances, is trivial to address - by alpha conversion. The trickier part is outside hygiene, when - user's binders capture globals required by the macro-generated - code. 
That's the cause of pretty puzzling and hard to find bugs. And - the *really* tricky part, which is still unsolved in metalua, is - when you have several levels of nesting between user code and macro - code. For now this case has to be hygienized by hand. - - Note 2: Converge has a pretty powerful approach of hygienic macros - in a Lisp-1 language; for long and boringly technical reasons, I - don't think its approch would be the best suited to metalua. - - Note 3: Redittors must have read that Paul Graham has released Arc, - which is also a Lisp-1 with Common Lisp style macros; I expect this - to create a bit of buzz, out of which might emerge proper solutions - the macro hygiene problem. - - -- No more need to create custom syntax for macros when you don't want - to. Extension 'dollar' will let you declare macros in the dollar - table, as in +{block: function dollar.MYMACRO(a, b, c) ... end}, - and use it as $MYMACRO(1, 2, 3) in your code. - - With this extension, you can write macros without knowing anything - about the metalua parser. Together with quasi-quotes and automatic - hygiene, this will probably be the closest we can go to "macros for - dummies" without creating an unmaintainable mess generator. - - -- Lexers can be switched on the fly. This lets you change the set of - keywords temporarily, with the new gg.with_lexer() functor, or - handle radically different syntaxes in a single file (think - multiple-languages systems such as LuaTeX, or programs+goo as PHP). - - -- Incorporation of the bugs listed on the mailing list and the blog. - - -- New samples and extensions, in various states of completion: - - - lists by comprehension, a la python/haskell. It includes lists - chunking, e.g. mylist[1 ... 3, 5 ... 7] - - - anaphoric macros for 'if' and 'while' statements: with this - extension, the condition of the 'if'/'while' is bound to variable - 'it' in the body; it lets you write things like: - while file:read '*l' do print(it) end. 
- No runtime overhead when 'it' isn't used in the body. An anaphoric - variable should also be made accessible for functions, to let - easily write anonymous recursive functions. - - - continue statement, logging facility, ternary "?:" choice operator - - - Try ... catch ... finally extension. - - - with ... do extension: it uses try/finally to make sure that - resources will be properly closed. The only constraint on - resources is that they have to sport a :close() releasing method. - For instance, he following code guarantees that file1 and file2 - will be closed, even if a return or an error occurs in the body. - - with file1, file2 = io.open "f1.txt", io.open "f2.txt" do - contents = file1:read'*a' .. file2:read ;*a' - end - - -Credits -======= -I'd like to thank the people who wrote the open source code which -makes metalua run: the Lua team, the authors of Yueliang, Pluto, Lua -Rings, Bitlib; the people whose bug reports, patches and insightful -discussions dramatically improved the global design, including -Laurence Tratt, Viacheslav Egorov, David Manura, John Belmonte, Eric -Raible... diff --git a/junk/todo.txt b/junk/todo.txt deleted file mode 100644 index ae1c739..0000000 --- a/junk/todo.txt +++ /dev/null @@ -1,4 +0,0 @@ -- macro hygiene, wrt inside and outside (anchoring) -- doc: lambda isn't a multiret anymore -- fix ^ indicator on error reports (one token too far) -- forbid flags other than expr in +{-{flag:...}} diff --git a/junk/typecheck.mlua b/junk/typecheck.mlua deleted file mode 100644 index 7ed6b89..0000000 --- a/junk/typecheck.mlua +++ /dev/null @@ -1,106 +0,0 @@ --- static partial checkings for Lua. --- --- This program checks some metalua or plain lua source code for most common --- mistakes. Its design focuses on the ability to check plain lua code: there is --- no need to load any extension in the module. --- --- The current checkings include: --- --- * Listing all free variables, and make sure they are declared. 
--- * For free vars known as modules, check that indexings in them are also --- declared. --- * When the type of something is known, do some basic type checkings. These --- checkings are by no means exhaustive; however, when a parameter function --- is constant or statically declared, it's checked. - - - ---[[ -Type grammar: - -t ::= -| t and t -| t or t -| function (t, ...) return t, ... end -| { (k=t)... } -| table(t, t) -| string -| number -| integer -| boolean -| userdata -| nil -| multi(t, ...) -| _ - ---]] - - -match function get_type -| `Number{...} -> return +{number} -| `String{...} -> return +{string} -| `True|`False -> return +{boolean} -| `Nil -> return +{nil} -| `Dots -> return +{_} -| `Stat{_,v} -> return get_type(v) -| `Paren{t} -> return get_one_type(t) -| `Call{f, ...} -> - local ftype = get_type(f) - match ftype with - | `Function{ _, {`Return{result}} } -> return get_type(result) - | `Function{ _, {`Return{...} == results} } -> - local r2 = +{ multi() } - for r in ivalues(results) table.insert(r2, get_type(r)) end - return r2 - | `And{...} -> return +{_} -- not implemented - | `Or{ a, b } -> match get_one_type(a), get_one_type(b) with - | `Function{...}==f1, `Function{...}==f2 -> - return `Op{ 'or', get_type(`Call{f1}), get_type(`Call{f2})} - | `Function{...}==f, _ | _, `Function{...}==f -> - return get_type(`Call{f}) - | _ -> return +{_} - end -| `Invoke{o, m, ... 
} == x -> return get_type(`Call{`Index{o, m}}) -| `Op{...}==o -> return get_op_type(o) -| `Table{...}==t -> - local r = `Table{ } - for x in ivalues(t) do - match x with - | `Pair{ `String{...}==k, v } -> table.insert(r, `Pair{k, get_one_type(v)}) - | t -> table.insert(r, get_one_type(t)) - end - end - return r -| `Function{...}==f -> -| `Id{v} -> -| `Index{t, k} -> match get_one_type(t), get_one_type(k) with - | `Call{`Id 'table', tk, tv }, _ -> return tv - | `Table{...}==tt, `Id 'string' -> - -local types_rt = require 'extension.types' - -function check_function(f, term) - match get_type(term) with - | `Function{ params, {`Return{...} == results}}, args -> - | `And{ a, b }, args -> - check_function(a, args) - check_function(b, args) - | `Or{ a, b }, args -> - if not pcall(check_function, a, args) then check_function(b, args) end - | `Id '_' -> -- pass - | _ -> error ("Call to a non-function") - end -end - -function check_index(a, b, term) - match get_type(term) with - | `Table{} - -match function cfg.id.up -| `Call{ f, ... } == x -> check_function (f, x) -| `Index{ a, b } == x -> check_index (a, b, x) -end - - --- List free vars -cfg.id. \ No newline at end of file diff --git a/metalua-compiler-0.7.2-1.rockspec b/metalua-compiler-0.7.2-1.rockspec new file mode 100644 index 0000000..92ca1e1 --- /dev/null +++ b/metalua-compiler-0.7.2-1.rockspec @@ -0,0 +1,60 @@ +--*-lua-*-- +package = "metalua-compiler" +version = "0.7.2-1" +source = { + url = "git://git.eclipse.org/gitroot/koneki/org.eclipse.koneki.metalua.git", + tag = "v0.7.2", +} + +description = { + summary = "Metalua's compiler: converting (Meta)lua source strings and files into executable Lua 5.1 bytecode", + detailed = [[ + This is the Metalua copmiler, packaged as a rock, depending + on the spearate metalua-parser AST generating library. It + compiles a superset of Lua 5.1 into bytecode, which can + then be loaded and executed by a Lua 5.1 VM. It also allows + to dump ASTs back into Lua source files. 
+ ]], + homepage = "http://git.eclipse.org/c/koneki/org.eclipse.koneki.metalua.git", + license = "EPL + MIT" +} + +dependencies = { + "lua ~> 5.1", -- Lua 5.2 bytecode not supported + "checks >= 1.0", -- Argument type checking + "luafilesystem >= 1.6.2", -- Cached compilation based on file timestamps + "readline >= 1.3", -- Better REPL experience + "metalua-parser == 0.7.2", -- AST production +} + +build = { + type="builtin", + modules={ + ["metalua"] = "metalua.lua", + ["metalua.compiler.bytecode"] = "metalua/compiler/bytecode.lua", + ["metalua.compiler.globals"] = "metalua/compiler/globals.lua", + ["metalua.compiler.bytecode.compile"] = "metalua/compiler/bytecode/compile.lua", + ["metalua.compiler.bytecode.lcode"] = "metalua/compiler/bytecode/lcode.lua", + ["metalua.compiler.bytecode.lopcodes"] = "metalua/compiler/bytecode/lopcodes.lua", + ["metalua.compiler.bytecode.ldump"] = "metalua/compiler/bytecode/ldump.lua", + ["metalua.loader"] = "metalua/loader.lua", + }, + install={lua={ + ["metalua.treequery"] = "metalua/treequery.mlua", + ["metalua.compiler.ast_to_src"] = "metalua/compiler/ast_to_src.mlua", + ["metalua.treequery.walk"] = "metalua/treequery/walk.mlua", + ["metalua.extension.match"] = "metalua/extension/match.mlua", + ["metalua.extension.comprehension"] = "metalua/extension/comprehension.mlua", + ["metalua.repl"] = "metalua/repl.mlua", + }} +} + +--[==[-- Generate file lists +for _, ext in ipairs{ 'lua', 'mlua' } do + for filename in io.popen("find metalua -name '*."..ext.."'") :lines() do + local modname = filename :gsub ('/', '.') :gsub ('%.'..ext..'$', '') + print((' '):rep(8)..'["' .. modname .. '"] = "' .. filename .. 
'",') + end + print"" +end +--]==]-- diff --git a/metalua-parser-0.7.2-1.rockspec b/metalua-parser-0.7.2-1.rockspec new file mode 100644 index 0000000..77718cf --- /dev/null +++ b/metalua-parser-0.7.2-1.rockspec @@ -0,0 +1,42 @@ +--*-lua-*-- +package = "metalua-parser" +version = "0.7.2-1" +source = { + url = "git://git.eclipse.org/gitroot/koneki/org.eclipse.koneki.metalua.git", + tag = "v0.7.2", +} +description = { + summary = "Metalua's parser: converting Lua source strings and files into AST", + detailed = [[ + This is a subset of the full Metalua compiler. It defines and generates an AST + format for Lua programs, which offers a nice level of abstraction to reason about + and manipulate Lua programs. + ]], + homepage = "http://git.eclipse.org/c/koneki/org.eclipse.koneki.metalua.git", + license = "EPL + MIT" +} +dependencies = { + "lua ~> 5.1", + "checks >= 1.0", +} +build = { + type="builtin", + modules={ + ["metalua.grammar.generator"] = "metalua/grammar/generator.lua", + ["metalua.grammar.lexer"] = "metalua/grammar/lexer.lua", + ["metalua.compiler.parser"] = "metalua/compiler/parser.lua", + ["metalua.compiler.parser.common"] = "metalua/compiler/parser/common.lua", + ["metalua.compiler.parser.table"] = "metalua/compiler/parser/table.lua", + ["metalua.compiler.parser.ext"] = "metalua/compiler/parser/ext.lua", + ["metalua.compiler.parser.annot.generator"] = "metalua/compiler/parser/annot/generator.lua", + ["metalua.compiler.parser.annot.grammar"] = "metalua/compiler/parser/annot/grammar.lua", + ["metalua.compiler.parser.stat"] = "metalua/compiler/parser/stat.lua", + ["metalua.compiler.parser.misc"] = "metalua/compiler/parser/misc.lua", + ["metalua.compiler.parser.lexer"] = "metalua/compiler/parser/lexer.lua", + ["metalua.compiler.parser.meta"] = "metalua/compiler/parser/meta.lua", + ["metalua.compiler.parser.expr"] = "metalua/compiler/parser/expr.lua", + ["metalua.compiler"] = "metalua/compiler.lua", + ["metalua.pprint"] = "metalua/pprint.lua", + } +} + diff 
--git a/metalua.lua b/metalua.lua new file mode 100644 index 0000000..4641380 --- /dev/null +++ b/metalua.lua @@ -0,0 +1,274 @@ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- + +-- Survive lack of checks +if not pcall(require, 'checks') then function package.preload.checks() function checks() end end end + +-- Main file for the metalua executable +require 'metalua.loader' -- load *.mlue files +require 'metalua.compiler.globals' -- metalua-aware loadstring, dofile etc. + +local alt_getopt = require 'alt_getopt' +local pp = require 'metalua.pprint' +local mlc = require 'metalua.compiler' + +local M = { } + +local AST_COMPILE_ERROR_NUMBER = -1 +local RUNTIME_ERROR_NUMBER = -3 + +local alt_getopt_options = "f:l:e:o:xivaASbs" + +local long_opts = { + file='f', + library='l', + literal='e', + output='o', + run='x', + interactive='i', + verbose='v', + ['print-ast']='a', + ['print-ast-lineinfo']='A', + ['print-src']='S', + ['meta-bugs']='b', + ['sharp-bang']='s', +} + +local chunk_options = { + library=1, + file=1, + literal=1 +} + +local usage=[[ + +Compile and/or execute metalua programs. 
Parameters passed to the +compiler should be prefixed with an option flag, hinting what must be +done with them: take tham as file names to compile, as library names +to load, as parameters passed to the running program... When option +flags are absent, metalua tries to adopt a "Do What I Mean" approach: + +- if no code (no library, no literal expression and no file) is + specified, the first flag-less parameter is taken as a file name to + load. + +- if no code and no parameter is passed, an interactive loop is + started. + +- if a target file is specified with --output, the program is not + executed by default, unless a --run flag forces it to. Conversely, + if no --output target is specified, the code is run unless ++run + forbids it. +]] + +function M.cmdline_parser(...) + local argv = {...} + local opts, optind, optarg = + alt_getopt.get_ordered_opts({...}, alt_getopt_options, long_opts) + --pp.printf("argv=%s; opts=%s, ending at %i, with optarg=%s", + -- argv, opts, optind, optarg) + local s2l = { } -- short to long option names conversion table + for long, short in pairs(long_opts) do s2l[short]=long end + local cfg = { chunks = { } } + for i, short in pairs(opts) do + local long = s2l[short] + if chunk_options[long] then table.insert(cfg.chunks, { tag=long, optarg[i] }) + else cfg[long] = optarg[i] or true end + end + cfg.params = { select(optind, ...) } + return cfg +end + +function M.main (...) + + local cfg = M.cmdline_parser(...) + + ------------------------------------------------------------------- + -- Print messages if in verbose mode + ------------------------------------------------------------------- + local function verb_print (fmt, ...) + if cfg.verbose then + return pp.printf ("[ "..fmt.." ]", ...) + end + end + + if cfg.verbose then + verb_print("raw options: %s", cfg) + end + + ------------------------------------------------------------------- + -- If there's no chunk but there are params, interpret the first + -- param as a file name. 
+ if not next(cfg.chunks) and next(cfg.params) then + local the_file = table.remove(cfg.params, 1) + verb_print("Param %q considered as a source file", the_file) + cfg.file={ the_file } + end + + ------------------------------------------------------------------- + -- If nothing to do, run REPL loop + if not next(cfg.chunks) and not cfg.interactive then + verb_print "Nothing to compile nor run, force interactive loop" + cfg.interactive=true + end + + + ------------------------------------------------------------------- + -- Run if asked to, or if no --output has been given + -- if cfg.run==false it's been *forced* to false, don't override. + if not cfg.run and not cfg.output then + verb_print("No output file specified; I'll run the program") + cfg.run = true + end + + local code = { } + + ------------------------------------------------------------------- + -- Get ASTs from sources + + local last_file_idx + for i, x in ipairs(cfg.chunks) do + local compiler = mlc.new() + local tag, val = x.tag, x[1] + verb_print("Compiling %s", x) + local st, ast + if tag=='library' then + ast = { tag='Call', + {tag='Id', "require" }, + {tag='String', val } } + elseif tag=='literal' then ast = compiler :src_to_ast(val) + elseif tag=='file' then + ast = compiler :srcfile_to_ast(val) + -- Isolate each file in a separate fenv + ast = { tag='Call', + { tag='Function', { { tag='Dots'} }, ast }, + { tag='Dots' } } + ast.source = '@'..val + code.source = '@'..val + last_file_idx = i + else + error ("Bad option "..tag) + end + local valid = true -- TODO: check AST's correctness + if not valid then + pp.printf ("Cannot compile %s:\n%s", x, ast or "no msg") + os.exit (AST_COMPILE_ERROR_NUMBER) + end + ast.origin = x + table.insert(code, ast) + end + -- The last file returns the whole chunk's result + if last_file_idx then + -- transform +{ (function(...) -{ast} end)(...) } + -- into +{ return (function(...) -{ast} end)(...) 
} + local prv_ast = code[last_file_idx] + local new_ast = { tag='Return', prv_ast } + code[last_file_idx] = new_ast + end + + -- Further uses of compiler won't involve AST transformations: + -- they can share the same instance. + -- TODO: reuse last instance if possible. + local compiler = mlc.new() + + ------------------------------------------------------------------- + -- AST printing + if cfg['print-ast'] or cfg['print-ast-lineinfo'] then + verb_print "Resulting AST:" + for _, x in ipairs(code) do + pp.printf("--- AST From %s: ---", x.source) + if x.origin and x.origin.tag=='File' then x=x[1][1][2][1] end + local pp_cfg = cfg['print-ast-lineinfo'] + and { line_max=1, fix_indent=1, metalua_tag=1 } + or { line_max=1, metalua_tag=1, hide_hash=1 } + pp.print(x, 80, pp_cfg) + end + end + + ------------------------------------------------------------------- + -- Source printing + if cfg['print-src'] then + verb_print "Resulting sources:" + for _, x in ipairs(code) do + printf("--- Source From %s: ---", table.tostring(x.source, 'nohash')) + if x.origin and x.origin.tag=='File' then x=x[1][1][2] end + print (compiler :ast2string (x)) + end + end + + -- TODO: canonize/check AST + + local bytecode = compiler :ast_to_bytecode (code) + code = nil + + ------------------------------------------------------------------- + -- Insert #!... command + if cfg.sharpbang then + local shbang = cfg.sharpbang + verb_print ("Adding sharp-bang directive %q", shbang) + if not shbang :match'^#!' then shbang = '#!' .. shbang end + if not shbang :match'\n$' then shbang = shbang .. '\n' end + bytecode = shbang .. 
bytecode + end + + ------------------------------------------------------------------- + -- Save to file + if cfg.output then + -- FIXME: handle '-' + verb_print ("Saving to file %q", cfg.output) + local file, err_msg = io.open(cfg.output, 'wb') + if not file then error("can't open output file: "..err_msg) end + file:write(bytecode) + file:close() + if cfg.sharpbang and os.getenv "OS" ~= "Windows_NT" then + pcall(os.execute, 'chmod a+x "'..cfg.output..'"') + end + end + + ------------------------------------------------------------------- + -- Run compiled code + if cfg.run then + verb_print "Running" + local f = compiler :bytecode_to_function (bytecode) + bytecode = nil + -- FIXME: isolate execution in a ring + -- FIXME: check for failures + local function print_traceback (errmsg) + return errmsg .. '\n' .. debug.traceback ('',2) .. '\n' + end + local function g() return f(unpack (cfg.params)) end + local st, msg = xpcall(g, print_traceback) + if not st then + io.stderr:write(msg) + os.exit(RUNTIME_ERROR_NUMBER) + end + end + + ------------------------------------------------------------------- + -- Run REPL loop + if cfg.interactive then + verb_print "Starting REPL loop" + require 'metalua.repl' .run() + end + + verb_print "Done" + +end + +return M.main(...) diff --git a/metalua/bytecode.lua b/metalua/bytecode.lua new file mode 100644 index 0000000..b3afbdb --- /dev/null +++ b/metalua/bytecode.lua @@ -0,0 +1,29 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. 
+-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +local compile = require 'metalua.compiler.bytecode.compile' +local ldump = require 'metalua.compiler.bytecode.ldump' + +local M = { } + +M.ast_to_proto = compile.ast_to_proto +M.proto_to_bytecode = ldump.dump_string +M.proto_to_file = ldump.dump_file + +return M \ No newline at end of file diff --git a/metalua/compiler.lua b/metalua/compiler.lua new file mode 100644 index 0000000..69cd7b2 --- /dev/null +++ b/metalua/compiler.lua @@ -0,0 +1,181 @@ +--------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- +-- Convert between various code representation formats. 
Atomic +-- converters are written in extenso, others are composed automatically +-- by chaining the atomic ones together in a closure. +-- +-- Supported formats are: +-- +-- * srcfile: the name of a file containing sources. +-- * src: these sources as a single string. +-- * lexstream: a stream of lexemes. +-- * ast: an abstract syntax tree. +-- * proto: a (Yueliang) struture containing a high level +-- representation of bytecode. Largely based on the +-- Proto structure in Lua's VM +-- * bytecode: a string dump of the function, as taken by +-- loadstring() and produced by string.dump(). +-- * function: an executable lua function in RAM. +-- +-------------------------------------------------------------------------------- + +require 'checks' + +local M = { } + +-------------------------------------------------------------------------------- +-- Order of the transformations. if 'a' is on the left of 'b', then a 'a' can +-- be transformed into a 'b' (but not the other way around). +-- M.sequence goes for numbers to format names, M.order goes from format +-- names to numbers. 
+-------------------------------------------------------------------------------- +M.sequence = { + 'srcfile', 'src', 'lexstream', 'ast', 'proto', 'bytecode', 'function' } + +local arg_types = { + srcfile = { 'string', '?string' }, + src = { 'string', '?string' }, + lexstream = { 'lexer.stream', '?string' }, + ast = { 'table', '?string' }, + proto = { 'table', '?string' }, + bytecode = { 'string', '?string' }, +} + +if false then + -- if defined, runs on every newly-generated AST + function M.check_ast(ast) + local function rec(x, n, parent) + if not x.lineinfo and parent.lineinfo then + local pp = require 'metalua.pprint' + pp.printf("WARNING: Missing lineinfo in child #%s `%s{...} of node at %s", + n, x.tag or '', tostring(parent.lineinfo)) + end + for i, child in ipairs(x) do + if type(child)=='table' then rec(child, i, x) end + end + end + rec(ast, -1, { }) + end +end + + +M.order= { }; for a,b in pairs(M.sequence) do M.order[b]=a end + +local CONV = { } -- conversion metatable __index + +function CONV :srcfile_to_src(x, name) + checks('metalua.compiler', 'string', '?string') + name = name or '@'..x + local f, msg = io.open (x, 'rb') + if not f then error(msg) end + local r, msg = f :read '*a' + if not r then error("Cannot read file '"..x.."': "..msg) end + f :close() + return r, name +end + +function CONV :src_to_lexstream(src, name) + checks('metalua.compiler', 'string', '?string') + local r = self.parser.lexer :newstream (src, name) + return r, name +end + +function CONV :lexstream_to_ast(lx, name) + checks('metalua.compiler', 'lexer.stream', '?string') + local r = self.parser.chunk(lx) + r.source = name + if M.check_ast then M.check_ast (r) end + return r, name +end + +local bytecode_compiler = nil -- cache to avoid repeated `pcall(require(...))` +local function get_bytecode_compiler() + if bytecode_compiler then return bytecode_compiler else + local status, result = pcall(require, 'metalua.compiler.bytecode') + if status then + bytecode_compiler = result + 
return result + elseif string.match(result, "not found") then + error "Compilation only available with full Metalua" + else error (result) end + end +end + +function CONV :ast_to_proto(ast, name) + checks('metalua.compiler', 'table', '?string') + return get_bytecode_compiler().ast_to_proto(ast, name), name +end + +function CONV :proto_to_bytecode(proto, name) + return get_bytecode_compiler().proto_to_bytecode(proto), name +end + +function CONV :bytecode_to_function(bc, name) + checks('metalua.compiler', 'string', '?string') + return loadstring(bc, name) +end + +-- Create all sensible combinations +for i=1,#M.sequence do + local src = M.sequence[i] + for j=i+2, #M.sequence do + local dst = M.sequence[j] + local dst_name = src.."_to_"..dst + local my_arg_types = arg_types[src] + local functions = { } + for k=i, j-1 do + local name = M.sequence[k].."_to_"..M.sequence[k+1] + local f = assert(CONV[name], name) + table.insert (functions, f) + end + CONV[dst_name] = function(self, a, b) + checks('metalua.compiler', unpack(my_arg_types)) + for _, f in ipairs(functions) do + a, b = f(self, a, b) + end + return a, b + end + --printf("Created M.%s out of %s", dst_name, table.concat(n, ', ')) + end +end + + +-------------------------------------------------------------------------------- +-- This one goes in the "wrong" direction, cannot be composed. +-------------------------------------------------------------------------------- +function CONV :function_to_bytecode(...) return string.dump(...) end + +function CONV :ast_to_src(...) + require 'metalua.loader' -- ast_to_string isn't written in plain lua + return require 'metalua.compiler.ast_to_src' (...) 
+end + +local MT = { __index=CONV, __type='metalua.compiler' } + +function M.new() + local parser = require 'metalua.compiler.parser' .new() + local self = { parser = parser } + setmetatable(self, MT) + return self +end + +return M \ No newline at end of file diff --git a/src/lib/metalua/ast_to_string.mlua b/metalua/compiler/ast_to_src.mlua similarity index 78% rename from src/lib/metalua/ast_to_string.mlua rename to metalua/compiler/ast_to_src.mlua index e27827d..65283c6 100644 --- a/src/lib/metalua/ast_to_string.mlua +++ b/metalua/compiler/ast_to_src.mlua @@ -1,9 +1,28 @@ --{ extension 'match' } +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- + +-{ extension ('match', ...) } local M = { } M.__index = M -ast_to_string = |x| M.run(x) +local pp=require 'metalua.pprint' -------------------------------------------------------------------------------- -- Instanciate a new AST->source synthetizer @@ -67,18 +86,20 @@ end -------------------------------------------------------------------------------- -- Keywords, which are illegal as identifiers. 
-------------------------------------------------------------------------------- -local keywords = table.transpose { +local keywords_list = { "and", "break", "do", "else", "elseif", "end", "false", "for", "function", "if", "in", "local", "nil", "not", "or", "repeat", "return", "then", "true", "until", "while" } +local keywords = { } +for _, kw in pairs(keywords_list) do keywords[kw]=true end -------------------------------------------------------------------------------- -- Return true iff string `id' is a legal identifier name. -------------------------------------------------------------------------------- local function is_ident (id) - return id:strmatch "^[%a_][%w_]*$" and not keywords[id] + return string['match'](id, "^[%a_][%w_]*$") and not keywords[id] end -------------------------------------------------------------------------------- @@ -102,7 +123,7 @@ end local op_preprec = { { "or", "and" }, { "lt", "le", "eq", "ne" }, - { "concat" }, + { "concat" }, { "add", "sub" }, { "mul", "div", "mod" }, { "unary", "not", "len" }, @@ -115,7 +136,7 @@ local op_preprec = { local op_prec = { } for prec, ops in ipairs (op_preprec) do - for op in ivalues (ops) do + for _, op in ipairs (ops) do op_prec[op] = prec end end @@ -139,6 +160,7 @@ local op_symbol = { -------------------------------------------------------------------------------- function M:node (node) assert (self~=M and self._acc) + if node==nil then self:acc'<>'; return end if not node.tag then -- tagless block. self:list (node, self.nl) else @@ -150,7 +172,7 @@ function M:node (node) else -- No appropriate method, fall back to splice dumping. -- This cannot happen in a plain Lua AST. self:acc " -{ " - self:acc (table.tostring (node, "nohash"), 80) + self:acc (pp.tostring (node, {metalua_tag=1, hide_hash=1}), 80) self:acc " }" end end @@ -162,13 +184,13 @@ end -- it can be a string or a synth method. 
-- `start' is an optional number (default == 1), indicating which is the -- first element of list to be converted, so that we can skip the begining --- of a list. +-- of a list. -------------------------------------------------------------------------------- function M:list (list, sep, start) for i = start or 1, # list do self:node (list[i]) if list[i + 1] then - if not sep then + if not sep then elseif type (sep) == "function" then sep (self) elseif type (sep) == "string" then self:acc (sep) else error "Invalid list separator" end @@ -192,7 +214,7 @@ end -- There are several things that could be refactored into common subroutines -- here: statement blocks dumping, function dumping... -- However, given their small size and linear execution --- (they basically perform series of :acc(), :node(), :list(), +-- (they basically perform series of :acc(), :node(), :list(), -- :nl(), :nlindent() and :nldedent() calls), it seems more readable -- to avoid multiplication of such tiny functions. -- @@ -211,8 +233,8 @@ end function M:Set (node) match node with - | `Set{ { `Index{ lhs, `String{ method } } }, - { `Function{ { `Id "self", ... } == params, body } } } + | `Set{ { `Index{ lhs, `String{ method } } }, + { `Function{ { `Id "self", ... } == params, body } } } if is_idx_stack (lhs) and is_ident (method) -> -- ``function foo:bar(...) ... end'' -- self:acc "function " @@ -239,7 +261,7 @@ function M:Set (node) self:nldedent () self:acc "end" - | `Set{ { `Id{ lhs1name } == lhs1, ... } == lhs, rhs } + | `Set{ { `Id{ lhs1name } == lhs1, ... } == lhs, rhs } if not is_ident (lhs1name) -> -- ``foo, ... = ...'' when foo is *not* a valid identifier. -- In that case, the spliced 1st variable must get parentheses, @@ -260,6 +282,20 @@ function M:Set (node) self:list (lhs, ", ") self:acc " = " self:list (rhs, ", ") + | `Set{ lhs, rhs, annot } -> + -- ``... 
= ...'', no syntax sugar, annotation -- + local n = #lhs + for i=1,n do + local ell, a = lhs[i], annot[i] + self:node (ell) + if a then + self:acc ' #' + self:node(a) + end + if i~=n then self:acc ', ' end + end + self:acc " = " + self:list (rhs, ", ") end end @@ -294,7 +330,7 @@ function M:If (node) self:nldedent () end -- odd number of children --> last one is an `else' clause -- - if #node%2 == 1 then + if #node%2 == 1 then self:acc "else" self:nlindent () self:list (node[#node], self.nl) @@ -334,17 +370,30 @@ function M:Forin (node, vars, generators, body) self:acc "end" end -function M:Local (node, lhs, rhs) - if next (lhs) then - self:acc "local " - self:list (lhs, ", ") - if rhs[1] then - self:acc " = " - self:list (rhs, ", ") - end - else -- Can't create a local statement with 0 variables in plain Lua - self:acc (table.tostring (node, 'nohash', 80)) - end +function M:Local (node, lhs, rhs, annots) + if next (lhs) then + self:acc "local " + if annots then + local n = #lhs + for i=1, n do + self:node (lhs) + local a = annots[i] + if a then + self:acc ' #' + self:node (a) + end + if i~=n then self:acc ', ' end + end + else + self:list (lhs, ", ") + end + if rhs[1] then + self:acc " = " + self:list (rhs, ", ") + end + else -- Can't create a local statement with 0 variables in plain Lua + self:acc (table.tostring (node, 'nohash', 80)) + end end function M:Localrec (node, lhs, rhs) @@ -362,7 +411,7 @@ function M:Localrec (node, lhs, rhs) self:nldedent () self:acc "end" - | _ -> + | _ -> -- Other localrec are unprintable ==> splice them -- -- This cannot happen in a plain Lua AST. 
-- self:acc "-{ " @@ -422,14 +471,27 @@ function M:String (node, str) self:acc (string.format ("%q", str):gsub ("\\\n", "\\n")) end -function M:Function (node, params, body) - self:acc "function (" - self:list (params, ", ") - self:acc ")" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - self:acc "end" +function M:Function (node, params, body, annots) + self:acc "function (" + if annots then + local n = #params + for i=1,n do + local p, a = params[i], annots[i] + self:node(p) + if annots then + self:acc " #" + self:node(a) + end + if i~=n then self:acc ', ' end + end + else + self:list (params, ", ") + end + self:acc ")" + self:nlindent () + self:list (body, self.nl) + self:nldedent () + self:acc "end" end function M:Table (node) @@ -451,7 +513,7 @@ function M:Table (node) self:acc "] = " self:node (value) - | _ -> + | _ -> -- ``value''. -- self:node (elem) end @@ -469,7 +531,7 @@ function M:Op (node, op, a, b) -- Transform ``not (a == b)'' into ``a ~= b''. -- match node with | `Op{ "not", `Op{ "eq", _a, _b } } - | `Op{ "not", `Paren{ `Op{ "eq", _a, _b } } } -> + | `Op{ "not", `Paren{ `Op{ "eq", _a, _b } } } -> op, a, b = "ne", _a, _b | _ -> end @@ -496,7 +558,7 @@ function M:Op (node, op, a, b) self:node (b) self:acc (right_paren and ")") - else -- unary operator. + else -- unary operator. local paren match a with | `Op{ op_a, ... } if op_prec[op] >= op_prec[op_a] -> paren = true @@ -528,11 +590,11 @@ function M:Index (node, table, key) self:acc (paren_table and ")") match key with - | `String{ field } if is_ident (field) -> + | `String{ field } if is_ident (field) -> -- ``table.key''. -- self:acc "." self:acc (field) - | _ -> + | _ -> -- ``table [key]''. 
-- self:acc "[" self:node (key) @@ -548,6 +610,84 @@ function M:Id (node, name) self:acc "-{`Id " self:String (node, name) self:acc "}" - end + end +end + + +M.TDyn = '*' +M.TDynbar = '**' +M.TPass = 'pass' +M.TField = 'field' +M.TIdbar = M.TId +M.TReturn = M.Return + + +function M:TId (node, name) self:acc(name) end + + +function M:TCatbar(node, te, tebar) + self:acc'(' + self:node(te) + self:acc'|' + self:tebar(tebar) + self:acc')' +end + +function M:TFunction(node, p, r) + self:tebar(p) + self:acc '->' + self:tebar(r) +end + +function M:TTable (node, default, pairs) + self:acc '[' + self:list (pairs, ', ') + if default.tag~='TField' then + self:acc '|' + self:node (default) + end + self:acc ']' +end + +function M:TPair (node, k, v) + self:node (k) + self:acc '=' + self:node (v) +end + +function M:TIdbar (node, name) + self :acc (name) +end + +function M:TCatbar (node, a, b) + self:node(a) + self:acc ' ++ ' + self:node(b) +end + +function M:tebar(node) + if node.tag then self:node(node) else + self:acc '(' + self:list(node, ', ') + self:acc ')' + end +end + +function M:TUnkbar(node, name) + self:acc '~~' + self:acc (name) +end + +function M:TUnk(node, name) + self:acc '~' + self:acc (name) +end + +for name, tag in pairs{ const='TConst', var='TVar', currently='TCurrently', just='TJust' } do + M[tag] = function(self, node, te) + self:acc (name..' ') + self:node(te) + end end +return (|x| M.run(x)) diff --git a/metalua/compiler/bytecode.lua b/metalua/compiler/bytecode.lua new file mode 100644 index 0000000..b3afbdb --- /dev/null +++ b/metalua/compiler/bytecode.lua @@ -0,0 +1,29 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. 
+-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +local compile = require 'metalua.compiler.bytecode.compile' +local ldump = require 'metalua.compiler.bytecode.ldump' + +local M = { } + +M.ast_to_proto = compile.ast_to_proto +M.proto_to_bytecode = ldump.dump_string +M.proto_to_file = ldump.dump_file + +return M \ No newline at end of file diff --git a/src/compiler/compile.lua b/metalua/compiler/bytecode/compile.lua similarity index 93% rename from src/compiler/compile.lua rename to metalua/compiler/bytecode/compile.lua index 32c817c..011517f 100644 --- a/src/compiler/compile.lua +++ b/metalua/compiler/bytecode/compile.lua @@ -1,42 +1,34 @@ ----------------------------------------------------------------------- +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Kein-Hong Man, Fabien Fleutot and others. -- --- WARNING! You're entering a hackish area, proceed at your own risks! +-- All rights reserved. -- --- This code partly results from the borrowing, then ruthless abuse, of --- Yueliang's implementation of Lua 5.0 compiler. I claim --- responsibility for all of the ugly, dirty stuff that you might spot --- in it. 
+-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html -- --- Eventually, this code will be rewritten, either in Lua or more --- probably in C. Meanwhile, if you're interested into digging --- metalua's sources, this is not the best part to invest your time --- on. +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html -- --- End of warning. +-- Contributors: +-- Kein-Hong Man - Initial implementation for Lua 5.0, part of Yueliang +-- Fabien Fleutot - Port to Lua 5.1, integration with Metalua -- ----------------------------------------------------------------------- +------------------------------------------------------------------------------- ----------------------------------------------------------------------- --- Metalua. --- --- Summary: Compile ASTs to Lua 5.1 VM function prototype. --- Largely based on: --- --- * Yueliang (http://luaforge.net/projects/yueliang), --- yueliang-0.1.2/orig-5.0.2/lparser.lua --- --- * Lua 5.1 sources (http://www.lua.org), src/lparser.c --- ---------------------------------------------------------------------- -- --- Copyright (c) 2006-2008, Fabien Fleutot . +-- This code mainly results from the borrowing, then ruthless abuse, of +-- Yueliang's implementation of Lua 5.0 compiler. -- --- This software is released under the MIT Licence, see licence.txt --- for details. 
--- ----------------------------------------------------------------------- +--------------------------------------------------------------------- -module ("bytecode", package.seeall) +local pp = require 'metalua.pprint' + +local luaK = require 'metalua.compiler.bytecode.lcode' +local luaP = require 'metalua.compiler.bytecode.lopcodes' local debugf = function() end --local debugf=printf @@ -44,16 +36,18 @@ local debugf = function() end local stat = { } local expr = { } -MAX_INT = 2147483645 -- INT_MAX-2 for 32-bit systems (llimits.h) -MAXVARS = 200 -- (llimits.h) -MAXUPVALUES = 32 -- (llimits.h) -MAXPARAMS = 100 -- (llimits.h) -LUA_MAXPARSERLEVEL = 200 -- (llimits.h) +local M = { } + +M.MAX_INT = 2147483645 -- INT_MAX-2 for 32-bit systems (llimits.h) +M.MAXVARS = 200 -- (llimits.h) +M.MAXUPVALUES = 32 -- (llimits.h) +M.MAXPARAMS = 100 -- (llimits.h) +M.LUA_MAXPARSERLEVEL = 200 -- (llimits.h) -- from lobject.h -VARARG_HASARG = 1 -VARARG_ISVARARG = 2 -VARARG_NEEDSARG = 4 +M.VARARG_HASARG = 1 +M.VARARG_ISVARARG = 2 +M.VARARG_NEEDSARG = 4 local function hasmultret (k) return k=="VCALL" or k=="VVARARG" @@ -106,7 +100,7 @@ end -- since [fs.nactvar] and [fs.freereg] aren't updated. ----------------------------------------------------------------------- local function registerlocalvar (fs, varname) - debugf("[locvar: %s = reg %i]", varname, fs.nlocvars) + --debugf("[locvar: %s = reg %i]", varname, fs.nlocvars) local f = fs.f f.locvars[fs.nlocvars] = { } -- LocVar f.locvars[fs.nlocvars].varname = varname @@ -264,7 +258,7 @@ local function close_func (fs) f.sizeupvalues = f.nups assert (fs.bl == nil) if next(fs.forward_gotos) then - local x = table.tostring(fs.forward_gotos) + local x = pp.tostring(fs.forward_gotos) error ("Unresolved goto: "..x) end end @@ -286,7 +280,7 @@ end ------------------------------------------------------------------------ -- FIXME: is there a need for f=fs.f? if yes, why not always using it? 
------------------------------------------------------------------------ -function indexupvalue(fs, name, v) +local function indexupvalue(fs, name, v) local f = fs.f for i = 0, f.nups - 1 do if fs.upvalues[i].k == v.k and fs.upvalues[i].info == v.info then @@ -393,7 +387,7 @@ local function funcargs (fs, ast, v, idx_from) end init_exp(v, "VCALL", luaK:codeABC(fs, "OP_CALL", base, nparams + 1, 2)) if ast.lineinfo then - luaK:fixline(fs, ast.lineinfo.first[1]) + luaK:fixline(fs, ast.lineinfo.first.line) else luaK:fixline(fs, ast.line) end @@ -477,7 +471,7 @@ end ------------------------------------------------------------------------ local function new_localvar (fs, name, n) assert (type (name) == "string") - if fs.nactvar + n > MAXVARS then error ("too many local vars") end + if fs.nactvar + n > M.MAXVARS then error ("too many local vars") end fs.actvar[fs.nactvar + n] = registerlocalvar (fs, name) --printf("[NEW_LOCVAR] %i = %s", fs.nactvar+n, name) end @@ -493,9 +487,9 @@ local function parlist (fs, ast_params) new_localvar (fs, ast_params[i][1], i-1) end -- from [code_param]: - --checklimit (fs, fs.nactvar, self.MAXPARAMS, "parameters") + --checklimit (fs, fs.nactvar, self.M.MAXPARAMS, "parameters") fs.f.numparams = fs.nactvar - fs.f.is_vararg = dots and VARARG_ISVARARG or 0 + fs.f.is_vararg = dots and M.VARARG_ISVARARG or 0 adjustlocalvars (fs, nparams) fs.f.numparams = fs.nactvar --FIXME vararg must be taken in account luaK:reserveregs (fs, fs.nactvar) -- reserve register for parameters @@ -530,7 +524,7 @@ end ------------------------------------------------------------------------ local function enterlevel (fs) fs.nestlevel = fs.nestlevel + 1 - assert (fs.nestlevel <= LUA_MAXPARSERLEVEL, "too many syntax levels") + assert (fs.nestlevel <= M.LUA_MAXPARSERLEVEL, "too many syntax levels") end ------------------------------------------------------------------------ @@ -673,8 +667,8 @@ end ------------------------------------------------------------------------ 
function stat.stat (fs, ast) - if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] end - -- debugf (" - Statement %s", disp.ast (ast) ) + if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end + --debugf (" - Statement %s", table.tostring (ast) ) if not ast.tag then chunk (fs, ast) else @@ -683,8 +677,7 @@ function stat.stat (fs, ast) error ("A statement cannot have tag `"..ast.tag) end parser (fs, ast) end - --debugf (" - /Statement `%s", ast.tag or "") - debugf (" - /Statement `%s", ast.tag) + --debugf (" - /Statement `%s", ast.tag) end ------------------------------------------------------------------------ @@ -694,7 +687,7 @@ stat.Do = block ------------------------------------------------------------------------ function stat.Break (fs, ast) - -- if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] + -- if ast.lineinfo then fs.lastline = ast.lineinfo.last.line local bl, upval = fs.bl, false while bl and not bl.isbreakable do if bl.upval then upval = true end @@ -725,10 +718,8 @@ function stat.Return (fs, ast) end first = fs.nactvar nret = luaK.LUA_MULTRET -- return all values - elseif nret == 1 then - --printf("[RETURN] 1 val: e=%s", tostringv(e)) + elseif nret == 1 then first = luaK:exp2anyreg(fs, e) - --printf("[RETURN] 1 val in reg %i", first) else --printf("* Return multiple vals in nextreg %i", fs.freereg) luaK:exp2nextreg(fs, e) -- values must go to the 'stack' @@ -893,7 +884,7 @@ function stat.Set (fs, ast) local legal = { VLOCAL=1, VUPVAL=1, VGLOBAL=1, VINDEXED=1 } --printv(lhs) if not legal [lhs.v.k] then - error ("Bad lhs expr: "..table.tostring(ast_lhs)) + error ("Bad lhs expr: "..pp.tostring(ast_lhs)) end if nvars < #ast_lhs then -- this is not the last lhs local nv = { v = { }, prev = lhs } @@ -1043,18 +1034,19 @@ end function expr.expr (fs, ast, v) if type(ast) ~= "table" then - error ("Expr AST expected, got "..table.tostring(ast)) end + error ("Expr AST expected, got "..pp.tostring(ast)) end - if ast.lineinfo then fs.lastline = 
ast.lineinfo.last[1] end + if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end - --debugf (" - Expression %s", tostringv (ast)) + --debugf (" - Expression %s", table.tostring (ast)) local parser = expr[ast.tag] if parser then parser (fs, ast, v) elseif not ast.tag then - error ("No tag in expression "..table.tostring(ast, 'nohash', 80)) + error ("No tag in expression ".. + pp.tostring(ast, {line_max=80, hide_hash=1, metalua_tag=1})) else error ("No parser for node `"..ast.tag) end - debugf (" - /`%s", ast.tag) + --debugf (" - /Expression `%s", ast.tag) end ------------------------------------------------------------------------ @@ -1077,8 +1069,8 @@ function expr.Dots (fs, ast, v) assert (fs.f.is_vararg ~= 0, "No vararg in this function") -- NEEDSARG flag is set if and only if the function is a vararg, -- but no vararg has been used yet in its code. - if fs.f.is_vararg < VARARG_NEEDSARG then - fs.f.is_varag = fs.f.is_vararg - VARARG_NEEDSARG end + if fs.f.is_vararg < M.VARARG_NEEDSARG then + fs.f.is_varag = fs.f.is_vararg - M.VARARG_NEEDSARG end init_exp (v, "VVARARG", luaK:codeABC (fs, "OP_VARARG", 0, 1, 0)) end @@ -1107,12 +1099,12 @@ end ------------------------------------------------------------------------ function expr.Function (fs, ast, v) - if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] end + if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end local new_fs = open_func(fs) if ast.lineinfo then new_fs.f.lineDefined, new_fs.f.lastLineDefined = - ast.lineinfo.first[1], ast.lineinfo.last[1] + ast.lineinfo.first.line, ast.lineinfo.last.line end parlist (new_fs, ast[1]) chunk (new_fs, ast[2]) @@ -1123,7 +1115,7 @@ end ------------------------------------------------------------------------ function expr.Op (fs, ast, v) - if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] end + if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end local op = ast[1] if #ast == 2 then @@ -1165,10 +1157,10 @@ end function expr.Index (fs, ast, 
v) if #ast ~= 2 then print"\n\nBAD INDEX AST:" - table.print(ast) + pp.print(ast) error "generalized indexes not implemented" end - if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] end + if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end --assert(fs.lastline ~= 0, ast.tag) @@ -1211,7 +1203,7 @@ function expr.Stat (fs, ast, v) -- "local foo = -{`Stat{...}}", variable foo will be messed up by -- the compilation of `Stat. -- FIX: save the active variables at indices >= nactvar in - -- save_actvar, and restore them after `Stat has been computer. + -- save_actvar, and restore them after `Stat has been computed. -- -- I use a while rather than for loops and length operators because -- fs.actvar is a 0-based array... @@ -1253,19 +1245,19 @@ function expr.Stat (fs, ast, v) --printf(" * End of Stat") end - - ------------------------------------------------------------------------ -- Main function: ast --> proto ------------------------------------------------------------------------ -function metalua_compile (ast, source) +function M.ast_to_proto (ast, source) local fs = open_func (nil) - fs.f.is_vararg = VARARG_ISVARARG + fs.f.is_vararg = M.VARARG_ISVARARG chunk (fs, ast) close_func (fs) assert (fs.prev == nil) assert (fs.f.nups == 0) assert (fs.nestlevel == 0) if source then fs.f.source = source end - return fs.f + return fs.f, source end + +return M \ No newline at end of file diff --git a/src/compiler/lcode.lua b/metalua/compiler/bytecode/lcode.lua similarity index 97% rename from src/compiler/lcode.lua rename to metalua/compiler/bytecode/lcode.lua index c4af684..ede1a1c 100644 --- a/src/compiler/lcode.lua +++ b/metalua/compiler/bytecode/lcode.lua @@ -1,20 +1,22 @@ ----------------------------------------------------------------------- +------------------------------------------------------------------------------- +-- Copyright (c) 2005-2013 Kein-Hong Man, Fabien Fleutot and others. -- --- WARNING! 
You're entering a hackish area, proceed at your own risks! +-- All rights reserved. -- --- This code results from the borrowing, then ruthless abuse, of --- Yueliang's implementation of Lua 5.0 compiler. I claim --- responsibility for all of the ugly, dirty stuff that you might spot --- in it. +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html -- --- Eventually, this code will be rewritten, either in Lua or more --- probably in C. Meanwhile, if you're interested into digging --- metalua's sources, this is not the best part to invest your time --- on. +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html -- --- End of warning. +-- Contributors: +-- Kein-Hong Man - Initial implementation for Lua 5.0, part of Yueliang +-- Fabien Fleutot - Port to Lua 5.1, integration with Metalua -- ----------------------------------------------------------------------- +------------------------------------------------------------------------------- --[[-------------------------------------------------------------------- @@ -52,11 +54,11 @@ -- luaK:sethvalue(o) (from lobject.h) ----------------------------------------------------------------------]] -module("bytecode", package.seeall) +local luaP = require 'metalua.compiler.bytecode.lopcodes' local function debugf() end -luaK = {} +local luaK = { } luaK.MAXSTACK = 250 -- (llimits.h, used in lcode.lua) luaK.LUA_MULTRET = -1 -- (lua.h) @@ -160,7 +162,7 @@ function luaK:fixjump(fs, pc, dest) local offset = dest - (pc + 1) assert(dest ~= self.NO_JUMP) if math.abs(offset) > luaP.MAXARG_sBx then - luaX:syntaxerror(fs.ls, "control structure too long") + error("control structure too long") end luaP:SETARG_sBx(jmp, 
offset) end @@ -315,7 +317,7 @@ function luaK:checkstack(fs, n) local newstack = fs.freereg + n if newstack > fs.f.maxstacksize then if newstack >= luaK.MAXSTACK then - luaX:syntaxerror(fs.ls, "function or expression too complex") + error("function or expression too complex") end fs.f.maxstacksize = newstack end @@ -1032,3 +1034,5 @@ function luaK:setlist (fs, base, nelems, tostore) end fs.freereg = base + 1 end + +return luaK \ No newline at end of file diff --git a/src/compiler/ldump.lua b/metalua/compiler/bytecode/ldump.lua similarity index 92% rename from src/compiler/ldump.lua rename to metalua/compiler/bytecode/ldump.lua index 3e07f70..6ac7617 100644 --- a/src/compiler/ldump.lua +++ b/metalua/compiler/bytecode/ldump.lua @@ -1,25 +1,25 @@ ----------------------------------------------------------------------- +------------------------------------------------------------------------------- +-- Copyright (c) 2005-2013 Kein-Hong Man, Fabien Fleutot and others. -- --- WARNING! You're entering a hackish area, proceed at your own risks! +-- All rights reserved. -- --- This code results from the borrowing, then ruthless abuse, of --- Yueliang's implementation of Lua 5.0 compiler. I claim --- responsibility for all of the ugly, dirty stuff that you might spot --- in it. +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html -- --- Eventually, this code will be rewritten, either in Lua or more --- probably in C. Meanwhile, if you're interested into digging --- metalua's sources, this is not the best part to invest your time --- on. +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html -- --- End of warning. 
+-- Contributors: +-- Kein-Hong Man - Initial implementation for Lua 5.0, part of Yueliang +-- Fabien Fleutot - Port to Lua 5.1, integration with Metalua -- ----------------------------------------------------------------------- +------------------------------------------------------------------------------- --[[-------------------------------------------------------------------- - $Id$ - ldump.lua Save bytecodes in Lua This file is part of Yueliang. @@ -56,9 +56,11 @@ -- luaU:ttype(o) (from lobject.h) ----------------------------------------------------------------------]] -module("bytecode", package.seeall) +local luaP = require 'metalua.compiler.bytecode.lopcodes' + +local M = { } -format = { } +local format = { } format.header = string.dump(function()end):sub(1, 12) format.little_endian, format.int_size, format.size_t_size, format.instr_size, @@ -70,7 +72,10 @@ assert(format.integral or format.number_size==8, "Number format not supported by assert(format.little_endian, "Big endian architectures not supported by dumper") --requires luaP -luaU = {} +local luaU = { } +M.luaU = luaU + +luaU.format = format -- constants used by dumper luaU.LUA_TNIL = 0 @@ -229,21 +234,21 @@ function luaU:DumpByte(y, D) end ------------------------------------------------------------------------ --- dumps a 32-bit signed integer (for int) +-- dumps a signed integer of size `format.int_size` (for int) ------------------------------------------------------------------------ function luaU:DumpInt(x, D) self:DumpBlock(self:from_int(x, format.int_size), D) end ------------------------------------------------------------------------ --- dumps a 32-bit unsigned integer (for size_t) +-- dumps an unsigned integer of size `format.size_t_size` (for size_t) ------------------------------------------------------------------------ function luaU:DumpSize(x, D) self:DumpBlock(self:from_int(x, format.size_t_size), D) end ------------------------------------------------------------------------ --- 
dumps a LUA_NUMBER (hard-coded as a double) +-- dumps a LUA_NUMBER; can be an int or double depending on the VM. ------------------------------------------------------------------------ function luaU:DumpNumber(x, D) if format.integral then @@ -423,7 +428,7 @@ function luaU:endianness() end -- FIXME: ugly concat-base generation in [make_setS], bufferize properly! -function dump_string (proto) +function M.dump_string (proto) local writer, buff = luaU:make_setS() luaU:dump (proto, writer, buff) return buff.data @@ -431,11 +436,13 @@ end -- FIXME: [make_setS] sucks, perform synchronous file writing -- Now unused -function dump_file (proto, filename) +function M.dump_file (proto, filename) local writer, buff = luaU:make_setS() luaU:dump (proto, writer, buff) local file = io.open (filename, "wb") file:write (buff.data) io.close(file) - if UNIX_SHARPBANG then os.execute ("chmod a+x "..filename) end -end \ No newline at end of file + --if UNIX_SHARPBANG then os.execute ("chmod a+x "..filename) end +end + +return M diff --git a/src/compiler/lopcodes.lua b/metalua/compiler/bytecode/lopcodes.lua similarity index 95% rename from src/compiler/lopcodes.lua rename to metalua/compiler/bytecode/lopcodes.lua index 5e4b964..e49285e 100644 --- a/src/compiler/lopcodes.lua +++ b/metalua/compiler/bytecode/lopcodes.lua @@ -1,20 +1,22 @@ ----------------------------------------------------------------------- +------------------------------------------------------------------------------- +-- Copyright (c) 2005-2013 Kein-Hong Man, Fabien Fleutot and others. -- --- WARNING! You're entering a hackish area, proceed at your own risks! +-- All rights reserved. -- --- This code results from the borrowing, then ruthless abuse, of --- Yueliang's implementation of Lua 5.0 compiler. I claim --- responsibility for all of the ugly, dirty stuff that you might spot --- in it. 
+-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html -- --- Eventually, this code will be rewritten, either in Lua or more --- probably in C. Meanwhile, if you're interested into digging --- metalua's sources, this is not the best part to invest your time --- on. +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html -- --- End of warning. +-- Contributors: +-- Kein-Hong Man - Initial implementation for Lua 5.0, part of Yueliang +-- Fabien Fleutot - Port to Lua 5.1, integration with Metalua -- ----------------------------------------------------------------------- +------------------------------------------------------------------------------- --[[-------------------------------------------------------------------- @@ -47,11 +49,9 @@ -- endian form and field size and positions are hard-coded ----------------------------------------------------------------------]] -module("bytecode", package.seeall) - local function debugf() end -luaP = { } +local luaP = { } --[[ =========================================================================== @@ -438,3 +438,5 @@ luaP.opmodes = { "0000102", -- OP_CLOSURE "0000101" -- OP_VARARG } + +return luaP \ No newline at end of file diff --git a/metalua/compiler/globals.lua b/metalua/compiler/globals.lua new file mode 100644 index 0000000..d5f7459 --- /dev/null +++ b/metalua/compiler/globals.lua @@ -0,0 +1,86 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. 
+-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +--*-lua-*----------------------------------------------------------------------- +-- Override Lua's default compilation functions, so that they support Metalua +-- rather than only plain Lua +-------------------------------------------------------------------------------- + +local mlc = require 'metalua.compiler' + +local M = { } + +-- Original versions +local original_lua_versions = { + load = load, + loadfile = loadfile, + loadstring = loadstring, + dofile = dofile } + +local lua_loadstring = loadstring +local lua_loadfile = loadfile + +function M.loadstring(str, name) + if type(str) ~= 'string' then error 'string expected' end + if str:match '^\027LuaQ' then return lua_loadstring(str) end + local n = str:match '^#![^\n]*\n()' + if n then str=str:sub(n, -1) end + -- FIXME: handle erroneous returns (return nil + error msg) + return mlc.new():src_to_function(str, name) +end + +function M.loadfile(filename) + local f, err_msg = io.open(filename, 'rb') + if not f then return nil, err_msg end + local success, src = pcall( f.read, f, '*a') + pcall(f.close, f) + if success then return M.loadstring (src, '@'..filename) + else return nil, src end +end + +function M.load(f, name) + local acc = { } + while true do + local x = f() + if not x then break end + assert(type(x)=='string', "function passed to load() must return strings") + table.insert(acc, x) + end + return 
M.loadstring(table.concat(acc)) +end + +function M.dostring(src) + local f, msg = M.loadstring(src) + if not f then error(msg) end + return f() +end + +function M.dofile(name) + local f, msg = M.loadfile(name) + if not f then error(msg) end + return f() +end + +-- Export replacement functions as globals +for name, f in pairs(M) do _G[name] = f end + +-- To be done *after* exportation +M.lua = original_lua_versions + +return M \ No newline at end of file diff --git a/metalua/compiler/parser.lua b/metalua/compiler/parser.lua new file mode 100644 index 0000000..74997ae --- /dev/null +++ b/metalua/compiler/parser.lua @@ -0,0 +1,42 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +-- Export all public APIs from sub-modules, squashed into a flat spacename + +local MT = { __type='metalua.compiler.parser' } + +local MODULE_REL_NAMES = { "annot.grammar", "expr", "meta", "misc", + "stat", "table", "ext" } + +local function new() + local M = { + lexer = require "metalua.compiler.parser.lexer" (); + extensions = { } } + for _, rel_name in ipairs(MODULE_REL_NAMES) do + local abs_name = "metalua.compiler.parser."..rel_name + local extender = require (abs_name) + if not M.extensions[abs_name] then + if type (extender) == 'function' then extender(M) end + 
M.extensions[abs_name] = extender + end + end + return setmetatable(M, MT) +end + +return { new = new } diff --git a/metalua/compiler/parser/annot/generator.lua b/metalua/compiler/parser/annot/generator.lua new file mode 100644 index 0000000..a8fcd62 --- /dev/null +++ b/metalua/compiler/parser/annot/generator.lua @@ -0,0 +1,48 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +require 'checks' +local gg = require 'metalua.grammar.generator' +local M = { } + +function M.opt(mlc, primary, a_type) + checks('table', 'table|function', 'string') + return gg.sequence{ + primary, + gg.onkeyword{ "#", function() return assert(mlc.annot[a_type]) end }, + builder = function(x) + local t, annot = unpack(x) + return annot and { tag='Annot', t, annot } or t + end } +end + +-- split a list of "foo" and "`Annot{foo, annot}" into a list of "foo" +-- and a list of "annot". +-- No annot list is returned if none of the elements were annotated. 
+function M.split(lst) + local x, a, some = { }, { }, false + for i, p in ipairs(lst) do + if p.tag=='Annot' then + some, x[i], a[i] = true, unpack(p) + else x[i] = p end + end + if some then return x, a else return lst end +end + +return M diff --git a/metalua/compiler/parser/annot/grammar.lua b/metalua/compiler/parser/annot/grammar.lua new file mode 100644 index 0000000..7ce3ec4 --- /dev/null +++ b/metalua/compiler/parser/annot/grammar.lua @@ -0,0 +1,112 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +local gg = require 'metalua.grammar.generator' + +return function(M) + local _M = gg.future(M) + M.lexer :add '->' + local A = { } + local _A = gg.future(A) + M.annot = A + + -- Type identifier: Lua keywords such as `"nil"` allowed. 
+ function M.annot.tid(lx) + local w = lx :next() + local t = w.tag + if t=='Keyword' and w[1] :match '^[%a_][%w_]*$' or w.tag=='Id' + then return {tag='TId'; lineinfo=w.lineinfo; w[1]} + else return gg.parse_error (lx, 'tid expected') end + end + + local field_types = { var='TVar'; const='TConst'; + currently='TCurrently'; field='TField' } + + -- TODO check lineinfo + function M.annot.tf(lx) + local tk = lx:next() + local w = tk[1] + local tag = field_types[w] + if not tag then error ('Invalid field type '..w) + elseif tag=='TField' then return {tag='TField'} else + local te = M.te(lx) + return {tag=tag; te} + end + end + + M.annot.tebar_content = gg.list{ + name = 'tebar content', + primary = _A.te, + separators = { ",", ";" }, + terminators = ")" } + + M.annot.tebar = gg.multisequence{ + name = 'annot.tebar', + --{ '*', builder = 'TDynbar' }, -- maybe not user-available + { '(', _A.tebar_content, ')', + builder = function(x) return x[1] end }, + { _A.te } + } + + M.annot.te = gg.multisequence{ + name = 'annot.te', + { _A.tid, builder=function(x) return x[1] end }, + { '*', builder = 'TDyn' }, + { "[", + gg.list{ + primary = gg.sequence{ + _M.expr, "=", _A.tf, + builder = 'TPair' + }, + separators = { ",", ";" }, + terminators = { "]", "|" } }, + gg.onkeyword{ "|", _A.tf }, + "]", + builder = function(x) + local fields, other = unpack(x) + return { tag='TTable', other or {tag='TField'}, fields } + end }, -- "[ ... 
]" + { '(', _A.tebar_content, ')', '->', '(', _A.tebar_content, ')', + builder = function(x) + local p, r = unpack(x) + return {tag='TFunction', p, r } + end } } + + M.annot.ts = gg.multisequence{ + name = 'annot.ts', + { 'return', _A.tebar_content, builder='TReturn' }, + { _A.tid, builder = function(x) + if x[1][1]=='pass' then return {tag='TPass'} + else error "Bad statement type" end + end } } + +-- TODO: add parsers for statements: +-- #return tebar +-- #alias = te +-- #ell = tf +--[[ + M.annot.stat_annot = gg.sequence{ + gg.list{ primary=_A.tid, separators='.' }, + '=', + XXX??, + builder = 'Annot' } +--]] + + return M.annot +end \ No newline at end of file diff --git a/metalua/compiler/parser/common.lua b/metalua/compiler/parser/common.lua new file mode 100644 index 0000000..1d6290e --- /dev/null +++ b/metalua/compiler/parser/common.lua @@ -0,0 +1,27 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +-- Shared common parser table. It will be filled by parser.init(), +-- and every other module will be able to call its elements at runtime. +-- +-- If the table was directly created in parser.init, a circular +-- dependency would be created: parser.init depends on other modules to fill the table, +-- so other modules can't simultaneously depend on it. 
+ +return { } \ No newline at end of file diff --git a/metalua/compiler/parser/expr.lua b/metalua/compiler/parser/expr.lua new file mode 100644 index 0000000..8ce4677 --- /dev/null +++ b/metalua/compiler/parser/expr.lua @@ -0,0 +1,206 @@ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- + +------------------------------------------------------------------------------- +-- +-- Exported API: +-- * [mlp.expr()] +-- * [mlp.expr_list()] +-- * [mlp.func_val()] +-- +------------------------------------------------------------------------------- + +local pp = require 'metalua.pprint' +local gg = require 'metalua.grammar.generator' +local annot = require 'metalua.compiler.parser.annot.generator' + +return function(M) + local _M = gg.future(M) + local _table = gg.future(M, 'table') + local _meta = gg.future(M, 'meta') -- TODO move to ext? + local _annot = gg.future(M, 'annot') -- TODO move to annot + + -------------------------------------------------------------------------------- + -- Non-empty expression list. Actually, this isn't used here, but that's + -- handy to give to users. 
+ -------------------------------------------------------------------------------- + M.expr_list = gg.list{ primary=_M.expr, separators="," } + + -------------------------------------------------------------------------------- + -- Helpers for function applications / method applications + -------------------------------------------------------------------------------- + M.func_args_content = gg.list{ + name = "function arguments", + primary = _M.expr, + separators = ",", + terminators = ")" } + + -- Used to parse methods + M.method_args = gg.multisequence{ + name = "function argument(s)", + { "{", _table.content, "}" }, + { "(", _M.func_args_content, ")", builder = unpack }, + { "+{", _meta.quote_content, "}" }, + -- TODO lineinfo? + function(lx) local r = M.opt_string(lx); return r and {r} or { } end } + + -------------------------------------------------------------------------------- + -- [func_val] parses a function, from opening parameters parenthese to + -- "end" keyword included. Used for anonymous functions as well as + -- function declaration statements (both local and global). + -- + -- It's wrapped in a [_func_val] eta expansion, so that when expr + -- parser uses the latter, they will notice updates of [func_val] + -- definitions. 
+ -------------------------------------------------------------------------------- + M.func_params_content = gg.list{ + name="function parameters", + gg.multisequence{ { "...", builder = "Dots" }, annot.opt(M, _M.id, 'te') }, + separators = ",", terminators = {")", "|"} } + + -- TODO move to annot + M.func_val = gg.sequence{ + name = "function body", + "(", _M.func_params_content, ")", _M.block, "end", + builder = function(x) + local params, body = unpack(x) + local annots, some = { }, false + for i, p in ipairs(params) do + if p.tag=='Annot' then + params[i], annots[i], some = p[1], p[2], true + else annots[i] = false end + end + if some then return { tag='Function', params, body, annots } + else return { tag='Function', params, body } end + end } + + local func_val = function(lx) return M.func_val(lx) end + + -------------------------------------------------------------------------------- + -- Default parser for primary expressions + -------------------------------------------------------------------------------- + function M.id_or_literal (lx) + local a = lx:next() + if a.tag~="Id" and a.tag~="String" and a.tag~="Number" then + local msg + if a.tag=='Eof' then + msg = "End of file reached when an expression was expected" + elseif a.tag=='Keyword' then + msg = "An expression was expected, and `"..a[1].. + "' can't start an expression" + else + msg = "Unexpected expr token " .. pp.tostring (a) + end + gg.parse_error (lx, msg) + end + return a + end + + + -------------------------------------------------------------------------------- + -- Builder generator for operators. 
Wouldn't be worth it if "|x|" notation + -- were allowed, but then lua 5.1 wouldn't compile it + -------------------------------------------------------------------------------- + + -- opf1 = |op| |_,a| `Op{ op, a } + local function opf1 (op) return + function (_,a) return { tag="Op", op, a } end end + + -- opf2 = |op| |a,_,b| `Op{ op, a, b } + local function opf2 (op) return + function (a,_,b) return { tag="Op", op, a, b } end end + + -- opf2r = |op| |a,_,b| `Op{ op, b, a } -- (args reversed) + local function opf2r (op) return + function (a,_,b) return { tag="Op", op, b, a } end end + + local function op_ne(a, _, b) + -- This version allows to remove the "ne" operator from the AST definition. + -- However, it doesn't always produce the exact same bytecode as Lua 5.1. + return { tag="Op", "not", + { tag="Op", "eq", a, b, lineinfo= { + first = a.lineinfo.first, last = b.lineinfo.last } } } + end + + + -------------------------------------------------------------------------------- + -- + -- complete expression + -- + -------------------------------------------------------------------------------- + + -- FIXME: set line number. 
In [expr] transformers probably + M.expr = gg.expr { + name = "expression", + primary = gg.multisequence{ + name = "expr primary", + { "(", _M.expr, ")", builder = "Paren" }, + { "function", _M.func_val, builder = unpack }, + { "-{", _meta.splice_content, "}", builder = unpack }, + { "+{", _meta.quote_content, "}", builder = unpack }, + { "nil", builder = "Nil" }, + { "true", builder = "True" }, + { "false", builder = "False" }, + { "...", builder = "Dots" }, + { "{", _table.content, "}", builder = unpack }, + _M.id_or_literal }, + + infix = { + name = "expr infix op", + { "+", prec = 60, builder = opf2 "add" }, + { "-", prec = 60, builder = opf2 "sub" }, + { "*", prec = 70, builder = opf2 "mul" }, + { "/", prec = 70, builder = opf2 "div" }, + { "%", prec = 70, builder = opf2 "mod" }, + { "^", prec = 90, builder = opf2 "pow", assoc = "right" }, + { "..", prec = 40, builder = opf2 "concat", assoc = "right" }, + { "==", prec = 30, builder = opf2 "eq" }, + { "~=", prec = 30, builder = op_ne }, + { "<", prec = 30, builder = opf2 "lt" }, + { "<=", prec = 30, builder = opf2 "le" }, + { ">", prec = 30, builder = opf2r "lt" }, + { ">=", prec = 30, builder = opf2r "le" }, + { "and",prec = 20, builder = opf2 "and" }, + { "or", prec = 10, builder = opf2 "or" } }, + + prefix = { + name = "expr prefix op", + { "not", prec = 80, builder = opf1 "not" }, + { "#", prec = 80, builder = opf1 "len" }, + { "-", prec = 80, builder = opf1 "unm" } }, + + suffix = { + name = "expr suffix op", + { "[", _M.expr, "]", builder = function (tab, idx) + return {tag="Index", tab, idx[1]} end}, + { ".", _M.id, builder = function (tab, field) + return {tag="Index", tab, _M.id2string(field[1])} end }, + { "(", _M.func_args_content, ")", builder = function(f, args) + return {tag="Call", f, unpack(args[1])} end }, + { "{", _table.content, "}", builder = function (f, arg) + return {tag="Call", f, arg[1]} end}, + { ":", _M.id, _M.method_args, builder = function (obj, post) + local m_name, args = 
unpack(post) + return {tag="Invoke", obj, _M.id2string(m_name), unpack(args)} end}, + { "+{", _meta.quote_content, "}", builder = function (f, arg) + return {tag="Call", f, arg[1] } end }, + default = { name="opt_string_arg", parse = _M.opt_string, builder = function(f, arg) + return {tag="Call", f, arg } end } } } + return M +end \ No newline at end of file diff --git a/metalua/compiler/parser/ext.lua b/metalua/compiler/parser/ext.lua new file mode 100644 index 0000000..4e9d395 --- /dev/null +++ b/metalua/compiler/parser/ext.lua @@ -0,0 +1,96 @@ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- +-- Non-Lua syntax extensions +-- +-------------------------------------------------------------------------------- + +local gg = require 'metalua.grammar.generator' + +return function(M) + + local _M = gg.future(M) + + --------------------------------------------------------------------------- + -- Algebraic Datatypes + ---------------------------------------------------------------------------- + local function adt (lx) + local node = _M.id (lx) + local tagval = node[1] + -- tagkey = `Pair{ `String "key", `String{ -{tagval} } } + local tagkey = { tag="Pair", {tag="String", "tag"}, 
{tag="String", tagval} } + if lx:peek().tag == "String" or lx:peek().tag == "Number" then + -- TODO support boolean litterals + return { tag="Table", tagkey, lx:next() } + elseif lx:is_keyword (lx:peek(), "{") then + local x = M.table.table (lx) + table.insert (x, 1, tagkey) + return x + else return { tag="Table", tagkey } end + end + + M.adt = gg.sequence{ "`", adt, builder = unpack } + + M.expr.primary :add(M.adt) + + ---------------------------------------------------------------------------- + -- Anonymous lambda + ---------------------------------------------------------------------------- + M.lambda_expr = gg.sequence{ + "|", _M.func_params_content, "|", _M.expr, + builder = function (x) + local li = x[2].lineinfo + return { tag="Function", x[1], + { {tag="Return", x[2], lineinfo=li }, lineinfo=li } } + end } + + M.expr.primary :add (M.lambda_expr) + + -------------------------------------------------------------------------------- + -- Allows to write "a `f` b" instead of "f(a, b)". Taken from Haskell. + -------------------------------------------------------------------------------- + function M.expr_in_backquotes (lx) return M.expr(lx, 35) end -- 35=limited precedence + M.expr.infix :add{ name = "infix function", + "`", _M.expr_in_backquotes, "`", prec = 35, assoc="left", + builder = function(a, op, b) return {tag="Call", op[1], a, b} end } + + -------------------------------------------------------------------------------- + -- C-style op+assignments + -- TODO: no protection against side-effects in LHS vars. 
+ -------------------------------------------------------------------------------- + local function op_assign(kw, op) + local function rhs(a, b) return { tag="Op", op, a, b } end + local function f(a,b) + if #a ~= #b then gg.parse_error "assymetric operator+assignment" end + local right = { } + local r = { tag="Set", a, right } + for i=1, #a do right[i] = { tag="Op", op, a[i], b[i] } end + return r + end + M.lexer :add (kw) + M.assignments[kw] = f + end + + local ops = { add='+='; sub='-='; mul='*='; div='/=' } + for ast_op_name, keyword in pairs(ops) do op_assign(keyword, ast_op_name) end + + return M +end \ No newline at end of file diff --git a/metalua/compiler/parser/lexer.lua b/metalua/compiler/parser/lexer.lua new file mode 100644 index 0000000..2b5ff7e --- /dev/null +++ b/metalua/compiler/parser/lexer.lua @@ -0,0 +1,43 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2014 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +---------------------------------------------------------------------- +-- Generate a new lua-specific lexer, derived from the generic lexer. 
+---------------------------------------------------------------------- + +local generic_lexer = require 'metalua.grammar.lexer' + +return function() + local lexer = generic_lexer.lexer :clone() + + local keywords = { + "and", "break", "do", "else", "elseif", + "end", "false", "for", "function", + "goto", -- Lua5.2 + "if", + "in", "local", "nil", "not", "or", "repeat", + "return", "then", "true", "until", "while", + "...", "..", "==", ">=", "<=", "~=", + "::", -- Lua5,2 + "+{", "-{" } -- Metalua + + for _, w in ipairs(keywords) do lexer :add (w) end + + return lexer +end \ No newline at end of file diff --git a/metalua/compiler/parser/meta.lua b/metalua/compiler/parser/meta.lua new file mode 100644 index 0000000..71eb3c3 --- /dev/null +++ b/metalua/compiler/parser/meta.lua @@ -0,0 +1,138 @@ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2014 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- + +-- Compile-time metaprogramming features: splicing ASTs generated during compilation, +-- AST quasi-quoting helpers. 
+ +local gg = require 'metalua.grammar.generator' + +return function(M) + local _M = gg.future(M) + M.meta={ } + local _MM = gg.future(M.meta) + + -------------------------------------------------------------------------------- + -- External splicing: compile an AST into a chunk, load and evaluate + -- that chunk, and replace the chunk by its result (which must also be + -- an AST). + -------------------------------------------------------------------------------- + + -- TODO: that's not part of the parser + function M.meta.eval (ast) + -- TODO: should there be one mlc per splice, or per parser instance? + local mlc = require 'metalua.compiler'.new() + local f = mlc :ast_to_function (ast, '=splice') + local result=f(M) -- splices act on the current parser + return result + end + + ---------------------------------------------------------------------------- + -- Going from an AST to an AST representing that AST + -- the only hash-part key being lifted is `"tag"`. + -- Doesn't lift subtrees protected inside a `Splice{ ... }. + -- e.g. 
change `Foo{ 123 } into + -- `Table{ `Pair{ `String "tag", `String "foo" }, `Number 123 } + ---------------------------------------------------------------------------- + local function lift (t) + --print("QUOTING:", table.tostring(t, 60,'nohash')) + local cases = { } + function cases.table (t) + local mt = { tag = "Table" } + --table.insert (mt, { tag = "Pair", quote "quote", { tag = "True" } }) + if t.tag == "Splice" then + assert (#t==1, "Invalid splice") + local sp = t[1] + return sp + elseif t.tag then + table.insert (mt, { tag="Pair", lift "tag", lift(t.tag) }) + end + for _, v in ipairs (t) do + table.insert (mt, lift(v)) + end + return mt + end + function cases.number (t) return { tag = "Number", t, quote = true } end + function cases.string (t) return { tag = "String", t, quote = true } end + function cases.boolean (t) return { tag = t and "True" or "False", t, quote = true } end + local f = cases [type(t)] + if f then return f(t) else error ("Cannot quote an AST containing "..tostring(t)) end + end + M.meta.lift = lift + + -------------------------------------------------------------------------------- + -- when this variable is false, code inside [-{...}] is compiled and + -- avaluated immediately. When it's true (supposedly when we're + -- parsing data inside a quasiquote), [-{foo}] is replaced by + -- [`Splice{foo}], which will be unpacked by [quote()]. + -------------------------------------------------------------------------------- + local in_a_quote = false + + -------------------------------------------------------------------------------- + -- Parse the inside of a "-{ ... }" + -------------------------------------------------------------------------------- + function M.meta.splice_content (lx) + local parser_name = "expr" + if lx:is_keyword (lx:peek(2), ":") then + local a = lx:next() + lx:next() -- skip ":" + assert (a.tag=="Id", "Invalid splice parser name") + parser_name = a[1] + end + -- TODO FIXME running a new parser with the old lexer?! 
+ local parser = require 'metalua.compiler.parser'.new() + local ast = parser [parser_name](lx) + if in_a_quote then -- only prevent quotation in this subtree + --printf("SPLICE_IN_QUOTE:\n%s", _G.table.tostring(ast, "nohash", 60)) + return { tag="Splice", ast } + else -- convert in a block, eval, replace with result + if parser_name == "expr" then ast = { { tag="Return", ast } } + elseif parser_name == "stat" then ast = { ast } + elseif parser_name ~= "block" then + error ("splice content must be an expr, stat or block") end + --printf("EXEC THIS SPLICE:\n%s", _G.table.tostring(ast, "nohash", 60)) + return M.meta.eval (ast) + end + end + + M.meta.splice = gg.sequence{ "-{", _MM.splice_content, "}", builder=unpack } + + -------------------------------------------------------------------------------- + -- Parse the inside of a "+{ ... }" + -------------------------------------------------------------------------------- + function M.meta.quote_content (lx) + local parser + if lx:is_keyword (lx:peek(2), ":") then -- +{parser: content } + local parser_name = M.id(lx)[1] + parser = M[parser_name] + lx:next() -- skip ":" + else -- +{ content } + parser = M.expr + end + + local prev_iq = in_a_quote + in_a_quote = true + --print("IN_A_QUOTE") + local content = parser (lx) + local q_content = M.meta.lift (content) + in_a_quote = prev_iq + return q_content + end + + return M +end \ No newline at end of file diff --git a/metalua/compiler/parser/misc.lua b/metalua/compiler/parser/misc.lua new file mode 100644 index 0000000..a24b006 --- /dev/null +++ b/metalua/compiler/parser/misc.lua @@ -0,0 +1,175 @@ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. 
+-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- + +------------------------------------------------------------------------------- +-- +-- Summary: metalua parser, miscellaneous utility functions. +-- +------------------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- +-- Exported API: +-- * [mlp.fget()] +-- * [mlp.id()] +-- * [mlp.opt_id()] +-- * [mlp.id_list()] +-- * [mlp.string()] +-- * [mlp.opt_string()] +-- * [mlp.id2string()] +-- +-------------------------------------------------------------------------------- + +local gg = require 'metalua.grammar.generator' + +-- TODO: replace splice-aware versions with naive ones, move etensions in ./meta + +return function(M) + local _M = gg.future(M) + +--[[ metaprog-free versions: + function M.id(lx) + if lx:peek().tag~='Id' then gg.parse_error(lx, "Identifier expected") + else return lx:next() end + end + + function M.opt_id(lx) + if lx:peek().tag~='Id' then return lx:next() else return false end + end + + function M.string(lx) + if lx:peek().tag~='String' then gg.parse_error(lx, "String expected") + else return lx:next() end + end + + function M.opt_string(lx) + if lx:peek().tag~='String' then return lx:next() else return false end + end + + -------------------------------------------------------------------------------- + -- Converts an identifier into a string. 
Hopefully one day it'll handle + -- splices gracefully, but that proves quite tricky. + -------------------------------------------------------------------------------- + function M.id2string (id) + if id.tag == "Id" then id.tag = "String"; return id + else error ("Identifier expected: "..table.tostring(id, 'nohash')) end + end +--]] + + -------------------------------------------------------------------------------- + -- Try to read an identifier (possibly as a splice), or return [false] if no + -- id is found. + -------------------------------------------------------------------------------- + function M.opt_id (lx) + local a = lx:peek(); + if lx:is_keyword (a, "-{") then + local v = M.meta.splice(lx) + if v.tag ~= "Id" and v.tag ~= "Splice" then + gg.parse_error(lx, "Bad id splice") + end + return v + elseif a.tag == "Id" then return lx:next() + else return false end + end + + -------------------------------------------------------------------------------- + -- Mandatory reading of an id: causes an error if it can't read one. + -------------------------------------------------------------------------------- + function M.id (lx) + return M.opt_id (lx) or gg.parse_error(lx,"Identifier expected") + end + + -------------------------------------------------------------------------------- + -- Common helper function + -------------------------------------------------------------------------------- + M.id_list = gg.list { primary = _M.id, separators = "," } + + -------------------------------------------------------------------------------- + -- Converts an identifier into a string. Hopefully one day it'll handle + -- splices gracefully, but that proves quite tricky. 
+ -------------------------------------------------------------------------------- + function M.id2string (id) + --print("id2string:", disp.ast(id)) + if id.tag == "Id" then id.tag = "String"; return id + elseif id.tag == "Splice" then + error ("id2string on splice not implemented") + -- Evaluating id[1] will produce `Id{ xxx }, + -- and we want it to produce `String{ xxx }. + -- The following is the plain notation of: + -- +{ `String{ `Index{ `Splice{ -{id[1]} }, `Number 1 } } } + return { tag="String", { tag="Index", { tag="Splice", id[1] }, + { tag="Number", 1 } } } + else error ("Identifier expected: "..table.tostring(id, 'nohash')) end + end + + -------------------------------------------------------------------------------- + -- Read a string, possibly spliced, or return an error if it can't + -------------------------------------------------------------------------------- + function M.string (lx) + local a = lx:peek() + if lx:is_keyword (a, "-{") then + local v = M.meta.splice(lx) + if v.tag ~= "String" and v.tag ~= "Splice" then + gg.parse_error(lx,"Bad string splice") + end + return v + elseif a.tag == "String" then return lx:next() + else error "String expected" end + end + + -------------------------------------------------------------------------------- + -- Try to read a string, or return false if it can't. No splice allowed. 
+ -------------------------------------------------------------------------------- + function M.opt_string (lx) + return lx:peek().tag == "String" and lx:next() + end + + -------------------------------------------------------------------------------- + -- Chunk reader: block + Eof + -------------------------------------------------------------------------------- + function M.skip_initial_sharp_comment (lx) + -- Dirty hack: I'm happily fondling lexer's private parts + -- FIXME: redundant with lexer:newstream() + lx :sync() + local i = lx.src:match ("^#.-\n()", lx.i) + if i then + lx.i = i + lx.column_offset = i + lx.line = lx.line and lx.line + 1 or 1 + end + end + + local function chunk (lx) + if lx:peek().tag == 'Eof' then + return { } -- handle empty files + else + M.skip_initial_sharp_comment (lx) + local chunk = M.block (lx) + if lx:peek().tag ~= "Eof" then + gg.parse_error(lx, "End-of-file expected") + end + return chunk + end + end + + -- chunk is wrapped in a sequence so that it has a "transformer" field. + M.chunk = gg.sequence { chunk, builder = unpack } + + return M +end \ No newline at end of file diff --git a/metalua/compiler/parser/stat.lua b/metalua/compiler/parser/stat.lua new file mode 100644 index 0000000..5d5e3a9 --- /dev/null +++ b/metalua/compiler/parser/stat.lua @@ -0,0 +1,279 @@ +------------------------------------------------------------------------------ +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. 
+-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- + +------------------------------------------------------------------------------- +-- +-- Summary: metalua parser, statement/block parser. This is part of the +-- definition of module [mlp]. +-- +------------------------------------------------------------------------------- + +------------------------------------------------------------------------------- +-- +-- Exports API: +-- * [mlp.stat()] +-- * [mlp.block()] +-- * [mlp.for_header()] +-- +------------------------------------------------------------------------------- + +local lexer = require 'metalua.grammar.lexer' +local gg = require 'metalua.grammar.generator' + +local annot = require 'metalua.compiler.parser.annot.generator' + +-------------------------------------------------------------------------------- +-- List of all keywords that indicate the end of a statement block. Users are +-- likely to extend this list when designing extensions. +-------------------------------------------------------------------------------- + + +return function(M) + local _M = gg.future(M) + + M.block_terminators = { "else", "elseif", "end", "until", ")", "}", "]" } + + -- FIXME: this must be handled from within GG!!! + -- FIXME: there's no :add method in the list anyway. Added by gg.list?! 
+ function M.block_terminators :add(x) + if type (x) == "table" then for _, y in ipairs(x) do self :add (y) end + else table.insert (self, x) end + end + + ---------------------------------------------------------------------------- + -- list of statements, possibly followed by semicolons + ---------------------------------------------------------------------------- + M.block = gg.list { + name = "statements block", + terminators = M.block_terminators, + primary = function (lx) + -- FIXME use gg.optkeyword() + local x = M.stat (lx) + if lx:is_keyword (lx:peek(), ";") then lx:next() end + return x + end } + + ---------------------------------------------------------------------------- + -- Helper function for "return " parsing. + -- Called when parsing return statements. + -- The specific test for initial ";" is because it's not a block terminator, + -- so without it gg.list would choke on "return ;" statements. + -- We don't make a modified copy of block_terminators because this list + -- is sometimes modified at runtime, and the return parser would get out of + -- sync if it was relying on a copy. + ---------------------------------------------------------------------------- + local return_expr_list_parser = gg.multisequence{ + { ";" , builder = function() return { } end }, + default = gg.list { + _M.expr, separators = ",", terminators = M.block_terminators } } + + + local for_vars_list = gg.list{ + name = "for variables list", + primary = _M.id, + separators = ",", + terminators = "in" } + + ---------------------------------------------------------------------------- + -- for header, between [for] and [do] (exclusive). + -- Return the `Forxxx{...} AST, without the body element (the last one). 
+ ---------------------------------------------------------------------------- + function M.for_header (lx) + local vars = M.id_list(lx) + if lx :is_keyword (lx:peek(), "=") then + if #vars ~= 1 then + gg.parse_error (lx, "numeric for only accepts one variable") + end + lx:next() -- skip "=" + local exprs = M.expr_list (lx) + if #exprs < 2 or #exprs > 3 then + gg.parse_error (lx, "numeric for requires 2 or 3 boundaries") + end + return { tag="Fornum", vars[1], unpack (exprs) } + else + if not lx :is_keyword (lx :next(), "in") then + gg.parse_error (lx, '"=" or "in" expected in for loop') + end + local exprs = M.expr_list (lx) + return { tag="Forin", vars, exprs } + end + end + + ---------------------------------------------------------------------------- + -- Function def parser helper: id ( . id ) * + ---------------------------------------------------------------------------- + local function fn_builder (list) + local acc = list[1] + local first = acc.lineinfo.first + for i = 2, #list do + local index = M.id2string(list[i]) + local li = lexer.new_lineinfo(first, index.lineinfo.last) + acc = { tag="Index", acc, index, lineinfo=li } + end + return acc + end + local func_name = gg.list{ _M.id, separators = ".", builder = fn_builder } + + ---------------------------------------------------------------------------- + -- Function def parser helper: ( : id )? 
+ ---------------------------------------------------------------------------- + local method_name = gg.onkeyword{ name = "method invocation", ":", _M.id, + transformers = { function(x) return x and x.tag=='Id' and M.id2string(x) end } } + + ---------------------------------------------------------------------------- + -- Function def builder + ---------------------------------------------------------------------------- + local function funcdef_builder(x) + local name, method, func = unpack(x) + if method then + name = { tag="Index", name, method, + lineinfo = { + first = name.lineinfo.first, + last = method.lineinfo.last } } + table.insert (func[1], 1, {tag="Id", "self"}) + end + local r = { tag="Set", {name}, {func} } + r[1].lineinfo = name.lineinfo + r[2].lineinfo = func.lineinfo + return r + end + + + ---------------------------------------------------------------------------- + -- if statement builder + ---------------------------------------------------------------------------- + local function if_builder (x) + local cond_block_pairs, else_block, r = x[1], x[2], {tag="If"} + local n_pairs = #cond_block_pairs + for i = 1, n_pairs do + local cond, block = unpack(cond_block_pairs[i]) + r[2*i-1], r[2*i] = cond, block + end + if else_block then table.insert(r, #r+1, else_block) end + return r + end + + -------------------------------------------------------------------------------- + -- produce a list of (expr,block) pairs + -------------------------------------------------------------------------------- + local elseifs_parser = gg.list { + gg.sequence { _M.expr, "then", _M.block , name='elseif parser' }, + separators = "elseif", + terminators = { "else", "end" } + } + + local annot_expr = gg.sequence { + _M.expr, + gg.onkeyword{ "#", gg.future(M, 'annot').tf }, + builder = function(x) + local e, a = unpack(x) + if a then return { tag='Annot', e, a } + else return e end + end } + + local annot_expr_list = gg.list { + primary = annot.opt(M, _M.expr, 'tf'), 
separators = ',' } + + ------------------------------------------------------------------------ + -- assignments and calls: statements that don't start with a keyword + ------------------------------------------------------------------------ + local function assign_or_call_stat_parser (lx) + local e = annot_expr_list (lx) + local a = lx:is_keyword(lx:peek()) + local op = a and M.assignments[a] + -- TODO: refactor annotations + if op then + --FIXME: check that [e] is a LHS + lx :next() + local annots + e, annots = annot.split(e) + local v = M.expr_list (lx) + if type(op)=="string" then return { tag=op, e, v, annots } + else return op (e, v) end + else + assert (#e > 0) + if #e > 1 then + gg.parse_error (lx, + "comma is not a valid statement separator; statement can be ".. + "separated by semicolons, or not separated at all") + elseif e[1].tag ~= "Call" and e[1].tag ~= "Invoke" then + local typename + if e[1].tag == 'Id' then + typename = '("'..e[1][1]..'") is an identifier' + elseif e[1].tag == 'Op' then + typename = "is an arithmetic operation" + else typename = "is of type '"..(e[1].tag or "").."'" end + gg.parse_error (lx, + "This expression %s; ".. + "a statement was expected, and only function and method call ".. + "expressions can be used as statements", typename); + end + return e[1] + end + end + + M.local_stat_parser = gg.multisequence{ + -- local function + { "function", _M.id, _M.func_val, builder = + function(x) + local vars = { x[1], lineinfo = x[1].lineinfo } + local vals = { x[2], lineinfo = x[2].lineinfo } + return { tag="Localrec", vars, vals } + end }, + -- local ( = )? 
+ default = gg.sequence{ + gg.list{ + primary = annot.opt(M, _M.id, 'tf'), + separators = ',' }, + gg.onkeyword{ "=", _M.expr_list }, + builder = function(x) + local annotated_left, right = unpack(x) + local left, annotations = annot.split(annotated_left) + return {tag="Local", left, right or { }, annotations } + end } } + + ------------------------------------------------------------------------ + -- statement + ------------------------------------------------------------------------ + M.stat = gg.multisequence { + name = "statement", + { "do", _M.block, "end", builder = + function (x) return { tag="Do", unpack (x[1]) } end }, + { "for", _M.for_header, "do", _M.block, "end", builder = + function (x) x[1][#x[1]+1] = x[2]; return x[1] end }, + { "function", func_name, method_name, _M.func_val, builder=funcdef_builder }, + { "while", _M.expr, "do", _M.block, "end", builder = "While" }, + { "repeat", _M.block, "until", _M.expr, builder = "Repeat" }, + { "local", _M.local_stat_parser, builder = unpack }, + { "return", return_expr_list_parser, builder = + function(x) x[1].tag='Return'; return x[1] end }, + { "break", builder = function() return { tag="Break" } end }, + { "-{", gg.future(M, 'meta').splice_content, "}", builder = unpack }, + { "if", gg.nonempty(elseifs_parser), gg.onkeyword{ "else", M.block }, "end", + builder = if_builder }, + default = assign_or_call_stat_parser } + + M.assignments = { + ["="] = "Set" + } + + function M.assignments:add(k, v) self[k] = v end + + return M +end \ No newline at end of file diff --git a/metalua/compiler/parser/table.lua b/metalua/compiler/parser/table.lua new file mode 100644 index 0000000..11102d9 --- /dev/null +++ b/metalua/compiler/parser/table.lua @@ -0,0 +1,77 @@ +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. 
+-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +-------------------------------------------------------------------------------- +-- +-- Exported API: +-- * [M.table_bracket_field()] +-- * [M.table_field()] +-- * [M.table_content()] +-- * [M.table()] +-- +-- KNOWN BUG: doesn't handle final ";" or "," before final "}" +-- +-------------------------------------------------------------------------------- + +local gg = require 'metalua.grammar.generator' + +return function(M) + + M.table = { } + local _table = gg.future(M.table) + local _expr = gg.future(M).expr + + -------------------------------------------------------------------------------- + -- `[key] = value` table field definition + -------------------------------------------------------------------------------- + M.table.bracket_pair = gg.sequence{ "[", _expr, "]", "=", _expr, builder = "Pair" } + + -------------------------------------------------------------------------------- + -- table element parser: list value, `id = value` pair or `[value] = value` pair. 
+ -------------------------------------------------------------------------------- + function M.table.element (lx) + if lx :is_keyword (lx :peek(), "[") then return M.table.bracket_pair(lx) end + local e = M.expr (lx) + if not lx :is_keyword (lx :peek(), "=") then return e end + lx :next(); -- skip the "=" + local key = M.id2string(e) -- will fail on non-identifiers + local val = M.expr(lx) + local r = { tag="Pair", key, val } + r.lineinfo = { first = key.lineinfo.first, last = val.lineinfo.last } + return r + end + + ----------------------------------------------------------------------------- + -- table constructor, without enclosing braces; returns a full table object + ----------------------------------------------------------------------------- + M.table.content = gg.list { + -- eta expansion to allow patching the element definition + primary = _table.element, + separators = { ",", ";" }, + terminators = "}", + builder = "Table" } + + -------------------------------------------------------------------------------- + -- complete table constructor including [{...}] + -------------------------------------------------------------------------------- + -- TODO beware, stat and expr use only table.content, this can't be patched. + M.table.table = gg.sequence{ "{", _table.content, "}", builder = unpack } + + return M +end \ No newline at end of file diff --git a/metalua/dollar.mlua b/metalua/dollar.mlua new file mode 100644 index 0000000..7756e92 --- /dev/null +++ b/metalua/dollar.mlua @@ -0,0 +1,31 @@ +-{ extension ('match', ...) } + +local M = { } + +M.register = { } + +local function dollar_builder(e) + match e with + | `Call{ `Id{name}, ... } -> + local entry = M.register[name] or error ("No macro "..name.." registered") + return entry(select(2, unpack(e))) + | `Id{name} -> + local entry = dollar[name] or error ("No macro "..name.." 
registered") + match type(entry) with + | 'function' -> return entry() + | 'table' -> return entry -- constant AST + | t -> error ("Invalid macro type "..t) + end + | _ -> error "Invalid $macro, '$' must be followed by an identifier or function call" + end +end + +function M.extend(M) + local M = require 'metalua.grammar.generator' .future(M) + M.expr.prefix :add { + '$', prec = 100, builder = |_, x| dollar_builder(x) } + M.stat:add{ + '$', _M.expr, builder = |x| dollar_builder(x[1]) } +end + +return M diff --git a/metalua/extension/comprehension.mlua b/metalua/extension/comprehension.mlua new file mode 100644 index 0000000..8917b9a --- /dev/null +++ b/metalua/extension/comprehension.mlua @@ -0,0 +1,282 @@ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- +-- +-- This extension implements list comprehensions, similar to Haskell and +-- Python syntax, to easily describe lists. +-- +-- * x[a ... 
b] is the list { x[a], x[a+1], ..., x[b] } +-- * { f()..., b } contains all the elements returned by f(), then b +-- (allows to expand list fields other than the last one) +-- * list comprehensions a la python, with "for" and "if" suffixes: +-- {i+10*j for i=1,3 for j=1,3 if i~=j} is { 21, 31, 12, 32, 13, 23 } +-- +------------------------------------------------------------------------------- + +-{ extension ("match", ...) } + +local SUPPORT_IMPROVED_LOOPS = true +local SUPPORT_IMPROVED_INDEXES = false -- depends on deprecated table.isub +local SUPPORT_CONTINUE = true +local SUPPORT_COMP_LISTS = true + +assert (SUPPORT_IMPROVED_LOOPS or not SUPPORT_CONTINUE, + "Can't support 'continue' without improved loop headers") + +local gg = require 'metalua.grammar.generator' +local Q = require 'metalua.treequery' + +local function dots_list_suffix_builder (x) return `DotsSuffix{ x } end + +local function for_list_suffix_builder (list_element, suffix) + local new_header = suffix[1] + match list_element with + | `Comp{ _, acc } -> table.insert (acc, new_header); return list_element + | _ -> return `Comp{ list_element, { new_header } } + end +end + +local function if_list_suffix_builder (list_element, suffix) + local new_header = `If{ suffix[1] } + match list_element with + | `Comp{ _, acc } -> table.insert (acc, new_header); return list_element + | _ -> return `Comp{ list_element, { new_header } } + end +end + +-- Builds a statement from a table element, which adds this element to +-- a table `t`, potentially thanks to an alias `tinsert` to +-- `table.insert`. +-- @param core the part around which the loops are built. +-- either `DotsSuffix{expr}, `Pair{ expr } or a plain expression +-- @param list comprehension suffixes, in the order in which they appear +-- either `Forin{ ... } or `Fornum{ ...} or `If{ ... }. In each case, +-- it misses a last child node as its body. 
+-- @param t a variable containing the table to fill +-- @param tinsert a variable containing `table.insert`. +-- +-- @return fill a statement which fills empty table `t` with the denoted element +local function comp_list_builder(core, list, t, tinsert) + local filler + -- 1 - Build the loop's core: if it has suffix "...", every elements of the + -- multi-return must be inserted, hence the extra [for] loop. + match core with + | `DotsSuffix{ element } -> + local x = gg.gensym() + filler = +{stat: for _, -{x} in pairs{ -{element} } do (-{tinsert})(-{t}, -{x}) end } + | `Pair{ key, value } -> + --filler = +{ -{t}[-{key}] = -{value} } + filler = `Set{ { `Index{ t, key } }, { value } } + | _ -> filler = +{ (-{tinsert})(-{t}, -{core}) } + end + + -- 2 - Stack the `if` and `for` control structures, from outside to inside. + -- This is done in a destructive way for the elements of [list]. + for i = #list, 1, -1 do + table.insert (list[i], {filler}) + filler = list[i] + end + + return filler +end + +local function table_content_builder (list) + local special = false -- Does the table need a special builder? + for _, element in ipairs(list) do + local etag = element.tag + if etag=='Comp' or etag=='DotsSuffix' then special=true; break end + end + if not special then list.tag='Table'; return list end + + local t, tinsert = gg.gensym 'table', gg.gensym 'table_insert' + local filler_block = { +{stat: local -{t}, -{tinsert} = { }, table.insert } } + for _, element in ipairs(list) do + local filler + match element with + | `Comp{ core, comp } -> filler = comp_list_builder(core, comp, t, tinsert) + | _ -> filler = comp_list_builder(element, { }, t, tinsert) + end + table.insert(filler_block, filler) + end + return `Stat{ filler_block, t } +end + + +-------------------------------------------------------------------------------- +-- Back-end for improved index operator. 
+local function index_builder(a, suffix) + match suffix[1] with + -- Single index, no range: keep the native semantics + | { { e, false } } -> return `Index{ a, e } + -- Either a range, or multiple indexes, or both + | ranges -> + local r = `Call{ +{table.isub}, a } + local function acc (x,y) table.insert (r,x); table.insert (r,y) end + for _, seq in ipairs (ranges) do + match seq with + | { e, false } -> acc(e,e) + | { e, f } -> acc(e,f) + end + end + return r + end +end + +------------------------------------------------------------------- +-- Find continue statements in a loop body, change them into goto +-- end-of-body. +local function transform_continue_statements(body) + local continue_statements = Q(body) + :if_unknown() -- tolerate unknown 'Continue' statements + :not_under ('Forin', 'Fornum', 'While', 'Repeat') + :filter ('Continue') + :list() + if next(continue_statements) then + local continue_label = gg.gensym 'continue' [1] + table.insert(body, `Label{ continue_label }) + for _, statement in ipairs(continue_statements) do + statement.tag = 'Goto' + statement[1] = continue_label + end + return true + else return false end +end + +------------------------------------------------------------------------------- +-- Back-end for loops with a multi-element header +local function loop_builder(x) + local first, elements, body = unpack(x) + + -- Change continue statements into gotos. + if SUPPORT_CONTINUE then transform_continue_statements(body) end + + ------------------------------------------------------------------- + -- If it's a regular loop, don't bloat the code + if not next(elements) then + table.insert(first, body) + return first + end + + ------------------------------------------------------------------- + -- There's no reason to treat the first element in a special way + table.insert(elements, 1, first) + + ------------------------------------------------------------------- + -- Change breaks into gotos that escape all loops at once. 
+ local exit_label = nil + local function break_to_goto(break_node) + if not exit_label then exit_label = gg.gensym 'break' [1] end + break_node = break_node or { } + break_node.tag = 'Goto' + break_node[1] = exit_label + return break_node + end + Q(body) + :not_under('Function', 'Forin', 'Fornum', 'While', 'Repeat') + :filter('Break') + :foreach (break_to_goto) + + ------------------------------------------------------------------- + -- Compile all headers elements, from last to first. + -- invariant: `body` is a block (not a statement) + local result = body + for i = #elements, 1, -1 do + local e = elements[i] + match e with + | `If{ cond } -> + result = { `If{ cond, result } } + | `Until{ cond } -> + result = +{block: if -{cond} then -{break_to_goto()} else -{result} end } + | `While{ cond } -> + if i==1 then result = { `While{ cond, result } } -- top-level while + else result = +{block: if -{cond} then -{result} else -{break_to_goto()} end } end + | `Forin{ ... } | `Fornum{ ... } -> + table.insert (e, result); result={e} + | _-> require'metalua.pprint'.printf("Bad loop header element %s", e) + end + end + + + ------------------------------------------------------------------- + -- If some breaks had to be changed into gotos, insert the label + if exit_label then result = { result, `Label{ exit_label } } end + + return result +end + + +-------------------------------------------------------------------------------- +-- Improved "[...]" index operator: +-- * support for multi-indexes ("foo[bar, gnat]") +-- * support for ranges ("foo[bar ... 
gnat]") +-------------------------------------------------------------------------------- +local function extend(M) + + local _M = gg.future(M) + + if SUPPORT_COMP_LISTS then + -- support for "for" / "if" comprehension suffixes in literal tables + local original_table_element = M.table.element + M.table.element = gg.expr{ name="table cell", + primary = original_table_element, + suffix = { name="table cell suffix", + { "...", builder = dots_list_suffix_builder }, + { "for", _M.for_header, builder = for_list_suffix_builder }, + { "if", _M.expr, builder = if_list_suffix_builder } } } + M.table.content.builder = table_content_builder + end + + if SUPPORT_IMPROVED_INDEXES then + -- Support for ranges and multiple indices in bracket suffixes + M.expr.suffix:del '[' + M.expr.suffix:add{ name="table index/range", + "[", gg.list{ + gg.sequence { _M.expr, gg.onkeyword{ "...", _M.expr } } , + separators = { ",", ";" } }, + "]", builder = index_builder } + end + + if SUPPORT_IMPROVED_LOOPS then + local original_for_header = M.for_header + M.stat :del 'for' + M.stat :del 'while' + + M.loop_suffix = gg.multisequence{ + { 'while', _M.expr, builder = |x| `Until{ `Op{ 'not', x[1] } } }, + { 'until', _M.expr, builder = |x| `Until{ x[1] } }, + { 'if', _M.expr, builder = |x| `If{ x[1] } }, + { 'for', original_for_header, builder = |x| x[1] } } + + M.loop_suffix_list = gg.list{ _M.loop_suffix, terminators='do' } + + M.stat :add{ + 'for', original_for_header, _M.loop_suffix_list, 'do', _M.block, 'end', + builder = loop_builder } + + M.stat :add{ + 'while', _M.expr, _M.loop_suffix_list, 'do', _M.block, 'end', + builder = |x| loop_builder{ `While{x[1]}, x[2], x[3] } } + end + + if SUPPORT_CONTINUE then + M.lexer :add 'continue' + M.stat :add{ 'continue', builder='Continue' } + end +end + +return extend diff --git a/src/lib/metalua/extension/match.mlua b/metalua/extension/match.mlua similarity index 71% rename from src/lib/metalua/extension/match.mlua rename to metalua/extension/match.mlua 
index 6cceea7..8561e05 100644 --- a/src/lib/metalua/extension/match.mlua +++ b/metalua/extension/match.mlua @@ -1,16 +1,23 @@ ----------------------------------------------------------------------- --- Metalua samples: $Id$ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. -- --- Summary: Structural pattern matching for metalua ADT. +-- All rights reserved. -- ----------------------------------------------------------------------- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html -- --- Copyright (c) 2006-2008, Fabien Fleutot . +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html -- --- This software is released under the MIT Licence, see licence.txt --- for details. +-- Contributors: +-- Fabien Fleutot - API and implementation -- --------------------------------------------------------------------------------- +------------------------------------------------------------------------------- + +------------------------------------------------------------------------------- -- -- Glossary: -- @@ -21,11 +28,11 @@ -- * pattern_group: several pattern seqs, one of them might match -- the term seq. 
-- * case: pattern_group * guard option * block --- * match_statement: tested term_seq * case list +-- * match_statement: tested term_seq * case list -- -- Hence a complete match statement is a: -- --- { list(expr), list{ list(list(expr)), expr or false, block } } +-- { list(expr), list{ list(list(expr)), expr or false, block } } -- -- Implementation hints -- ==================== @@ -57,21 +64,23 @@ -- Code generation is performed by acc_xxx() functions, which accumulate -- code in cfg.code: -- --- * acc_test(test, cfg) will generate a jump to cfg.on_failure +-- * acc_test(test, cfg) will generate a jump to cfg.on_failure -- *when the test returns TRUE* -- -- * acc_stat accumulates a statement --- --- * acc_assign accumulate an assignment statement, and makes sure that +-- +-- * acc_assign accumulate an assignment statement, and makes sure that -- the LHS variable the registered as local in cfg.locals. --- ----------------------------------------------------------------------- +-- +------------------------------------------------------------------------------- -- TODO: hygiene wrt type() -- TODO: cfg.ntmp isn't reset as often as it could. I'm not even sure -- the corresponding locals are declared. -module ('spmatch', package.seeall) + +local gg = require 'metalua.grammar.generator' +local pp = require 'metalua.pprint' ---------------------------------------------------------------------- -- This would have been best done through library 'metalua.walk', @@ -79,60 +88,112 @@ module ('spmatch', package.seeall) -- It replaces all instances of `...' in `ast' with `term', unless -- it appears in a function. ---------------------------------------------------------------------- -function replace_dots (ast, term) - local function rec (x) - if type(x) == 'table' then - if x.tag=='Dots' then - if term=='ambiguous' then - error ("You can't use `...' on the right of a match case when it appears ".. 
- "more than once on the left") - else - x <- term - end - elseif x.tag=='Function' then return - else for y in ivalues (x) do rec (y) end end - end - end - return rec (ast) +local function replace_dots (ast, term) + local function rec (node) + for i, child in ipairs(node) do + if type(child)~="table" then -- pass + elseif child.tag=='Dots' then + if term=='ambiguous' then + error ("You can't use `...' on the right of a match case when it appears ".. + "more than once on the left") + else node[i] = term end + elseif child.tag=='Function' then return nil + else rec(child) end + end + end + return rec(ast) end -tmpvar_base = mlp.gensym 'submatch.' [1] -function next_tmpvar(cfg) +local tmpvar_base = gg.gensym 'submatch.' [1] + +local function next_tmpvar(cfg) assert (cfg.ntmp, "No cfg.ntmp imbrication level in the match compiler") cfg.ntmp = cfg.ntmp+1 return `Id{ tmpvar_base .. cfg.ntmp } end -- Code accumulators -acc_stat = |x,cfg| table.insert (cfg.code, x) -acc_test = |x,cfg| acc_stat(+{stat: if -{x} then -{`Goto{cfg.on_failure}} end}, cfg) +local acc_stat = |x,cfg| table.insert (cfg.code, x) +local acc_test = |x,cfg| acc_stat(+{stat: if -{x} then -{`Goto{cfg.on_failure}} end}, cfg) -- lhs :: `Id{ string } -- rhs :: expr -function acc_assign (lhs, rhs, cfg) +local function acc_assign (lhs, rhs, cfg) assert(lhs.tag=='Id') cfg.locals[lhs[1]] = true acc_stat (`Set{ {lhs}, {rhs} }, cfg) end -literal_tags = table.transpose{ 'String', 'Number', 'True', 'False', 'Nil' } +local literal_tags = { String=1, Number=1, True=1, False=1, Nil=1 } -- pattern :: `Id{ string } -- term :: expr -function id_pattern_element_builder (pattern, term, cfg) +local function id_pattern_element_builder (pattern, term, cfg) assert (pattern.tag == "Id") - if pattern[1] == "_" then + if pattern[1] == "_" then -- "_" is used as a dummy var ==> no assignment, no == checking cfg.locals._ = true - elseif cfg.locals[pattern[1]] then + elseif cfg.locals[pattern[1]] then -- This var is already bound ==> 
test for equality acc_test (+{ -{term} ~= -{pattern} }, cfg) else -- Free var ==> bind it, and remember it for latter linearity checking - acc_assign (pattern, term, cfg) + acc_assign (pattern, term, cfg) cfg.locals[pattern[1]] = true end end +-- mutually recursive with table_pattern_element_builder +local pattern_element_builder + +-- pattern :: pattern and `Table{ } +-- term :: expr +local function table_pattern_element_builder (pattern, term, cfg) + local seen_dots, len = false, 0 + acc_test (+{ type( -{term} ) ~= "table" }, cfg) + for i = 1, #pattern do + local key, sub_pattern + if pattern[i].tag=="Pair" then -- Explicit key/value pair + key, sub_pattern = unpack (pattern[i]) + assert (literal_tags[key.tag], "Invalid key") + else -- Implicit key + len, key, sub_pattern = len+1, `Number{ len+1 }, pattern[i] + end + + -- '...' can only appear in final position + -- Could be fixed actually... + assert (not seen_dots, "Wrongly placed `...' ") + + if sub_pattern.tag == "Id" then + -- Optimization: save a useless [ v(n+1)=v(n).key ] + id_pattern_element_builder (sub_pattern, `Index{ term, key }, cfg) + if sub_pattern[1] ~= "_" then + acc_test (+{ -{sub_pattern} == nil }, cfg) + end + elseif sub_pattern.tag == "Dots" then + -- Remember where the capture is, and thatt arity checking shouldn't occur + seen_dots = true + else + -- Business as usual: + local v2 = next_tmpvar(cfg) + acc_assign (v2, `Index{ term, key }, cfg) + pattern_element_builder (sub_pattern, v2, cfg) + -- TODO: restore ntmp? + end + end + if seen_dots then -- remember how to retrieve `...' + -- FIXME: check, but there might be cases where the variable -{term} + -- will be overridden in contrieved tables. 
+ -- ==> save it now, and clean the setting statement if unused + if cfg.dots_replacement then cfg.dots_replacement = 'ambiguous' + else cfg.dots_replacement = +{ select (-{`Number{len}}, unpack(-{term})) } end + else -- Check arity + acc_test (+{ #-{term} ~= -{`Number{len}} }, cfg) + end +end + +-- mutually recursive with pattern_element_builder +local eq_pattern_element_builder, regexp_pattern_element_builder + -- Concatenate code in [cfg.code], that will jump to label -- [cfg.on_failure] if [pattern] doesn't match [term]. [pattern] -- should be an identifier, or at least cheap to compute and @@ -143,7 +204,7 @@ end function pattern_element_builder (pattern, term, cfg) if literal_tags[pattern.tag] then acc_test (+{ -{term} ~= -{pattern} }, cfg) - elseif "Id" == pattern.tag then + elseif "Id" == pattern.tag then id_pattern_element_builder (pattern, term, cfg) elseif "Op" == pattern.tag and "div" == pattern[1] then regexp_pattern_element_builder (pattern, term, cfg) @@ -151,8 +212,10 @@ function pattern_element_builder (pattern, term, cfg) eq_pattern_element_builder (pattern, term, cfg) elseif "Table" == pattern.tag then table_pattern_element_builder (pattern, term, cfg) - else - error ("Invalid pattern: "..table.tostring(pattern, "nohash")) + else + error ("Invalid pattern at ".. + tostring(pattern.lineinfo).. + ": "..pp.tostring(pattern, {hide_hash=true})) end end @@ -166,16 +229,16 @@ end -- pattern :: `Op{ 'div', string, list{`Id string} or `Id{ string }} -- term :: expr -function regexp_pattern_element_builder (pattern, term, cfg) +local function regexp_pattern_element_builder (pattern, term, cfg) local op, regexp, sub_pattern = unpack(pattern) -- Sanity checks -- assert (op=='div', "Don't know what to do with that op in a pattern") - assert (regexp.tag=="String", + assert (regexp.tag=="String", "Left hand side operand for '/' in a pattern must be ".. 
"a literal string representing a regular expression") if sub_pattern.tag=="Table" then - for x in ivalues(sub_pattern) do + for _, x in ipairs(sub_pattern) do assert (x.tag=="Id" or x.tag=='Dots', "Right hand side operand for '/' in a pattern must be ".. "a list of identifiers") @@ -198,55 +261,10 @@ function regexp_pattern_element_builder (pattern, term, cfg) pattern_element_builder (sub_pattern, v2, cfg) end --- pattern :: pattern and `Table{ } --- term :: expr -function table_pattern_element_builder (pattern, term, cfg) - local seen_dots, len = false, 0 - acc_test (+{ type( -{term} ) ~= "table" }, cfg) - for i = 1, #pattern do - local key, sub_pattern - if pattern[i].tag=="Pair" then -- Explicit key/value pair - key, sub_pattern = unpack (pattern[i]) - assert (literal_tags[key.tag], "Invalid key") - else -- Implicit key - len, key, sub_pattern = len+1, `Number{ len+1 }, pattern[i] - end - - -- '...' can only appear in final position - -- Could be fixed actually... - assert (not seen_dots, "Wrongly placed `...' ") - - if sub_pattern.tag == "Id" then - -- Optimization: save a useless [ v(n+1)=v(n).key ] - id_pattern_element_builder (sub_pattern, `Index{ term, key }, cfg) - if sub_pattern[1] ~= "_" then - acc_test (+{ -{sub_pattern} == nil }, cfg) - end - elseif sub_pattern.tag == "Dots" then - -- Remember where the capture is, and thatt arity checking shouldn't occur - seen_dots = true - else - -- Business as usual: - local v2 = next_tmpvar(cfg) - acc_assign (v2, `Index{ term, key }, cfg) - pattern_element_builder (sub_pattern, v2, cfg) - -- TODO: restore ntmp? - end - end - if seen_dots then -- remember how to retrieve `...' - -- FIXME: check, but there might be cases where the variable -{term} - -- will be overridden in contrieved tables. 
- -- ==> save it now, and clean the setting statement if unused - if cfg.dots_replacement then cfg.dots_replacement = 'ambiguous' - else cfg.dots_replacement = +{ select (-{`Number{len}}, unpack(-{term})) } end - else -- Check arity - acc_test (+{ #-{term} ~= -{`Number{len}} }, cfg) - end -end -- Jumps to [cfg.on_faliure] if pattern_seq doesn't match -- term_seq. -function pattern_seq_builder (pattern_seq, term_seq, cfg) +local function pattern_seq_builder (pattern_seq, term_seq, cfg) if #pattern_seq ~= #term_seq then error ("Bad seq arity") end cfg.locals = { } -- reset bound variables between alternatives for i=1, #pattern_seq do @@ -265,12 +283,12 @@ end -- goto after_success -- label on_failure_i -------------------------------------------------- -function case_builder (case, term_seq, cfg) +local function case_builder (case, term_seq, cfg) local patterns_group, guard, block = unpack(case) - local on_success = mlp.gensym 'on_success' [1] + local on_success = gg.gensym 'on_success' [1] for i = 1, #patterns_group do local pattern_seq = patterns_group[i] - cfg.on_failure = mlp.gensym 'match_fail' [1] + cfg.on_failure = gg.gensym 'match_fail' [1] cfg.dots_replacement = false pattern_seq_builder (pattern_seq, term_seq, cfg) if i<#patterns_group then @@ -289,11 +307,11 @@ function case_builder (case, term_seq, cfg) acc_stat (`Label{cfg.on_failure}, cfg) end -function match_builder (x) +local function match_builder (x) local term_seq, cases = unpack(x) - local cfg = { + local cfg = { code = `Do{ }, - after_success = mlp.gensym "_after_success" } + after_success = gg.gensym "_after_success" } -- Some sharing issues occur when modifying term_seq, @@ -312,7 +330,7 @@ function match_builder (x) -- Temporary workaround: suppress the condition, so that -- all external variables are copied into unique names. 
--if t.tag ~= 'Id' and not literal_tags[t.tag] then - local v = mlp.gensym 'v' + local v = gg.gensym 'v' if not match_locals then match_locals = `Local{ {v}, {t} } else table.insert(match_locals[1], v) table.insert(match_locals[2], t) @@ -321,11 +339,11 @@ function match_builder (x) --end end term_seq = new_term_seq - + if match_locals then acc_stat(match_locals, cfg) end for i=1, #cases do - local case_cfg = { + local case_cfg = { after_success = cfg.after_success, code = `Do{ } -- locals = { } -- unnecessary, done by pattern_seq_builder @@ -334,41 +352,49 @@ function match_builder (x) if next (case_cfg.locals) then local case_locals = { } table.insert (case_cfg.code, 1, `Local{ case_locals, { } }) - for v in keys (case_cfg.locals) do + for v, _ in pairs (case_cfg.locals) do table.insert (case_locals, `Id{ v }) end end acc_stat(case_cfg.code, cfg) - end - acc_stat(+{error 'mismatch'}, cfg) - acc_stat(`Label{cfg.after_success}, cfg) - return cfg.code + end + local li = `String{tostring(cases.lineinfo)} + acc_stat(+{error('mismatch at '..-{li})}, cfg) + acc_stat(`Label{cfg.after_success}, cfg) + return cfg.code end ---------------------------------------------------------------------- -- Syntactical front-end ---------------------------------------------------------------------- -mlp.lexer:add{ "match", "with", "->" } -mlp.block.terminators:add "|" - -match_cases_list_parser = gg.list{ name = "match cases list", - gg.sequence{ name = "match case", - gg.list{ name = "match case patterns list", - primary = mlp.expr_list, - separators = "|", - terminators = { "->", "if" } }, - gg.onkeyword{ "if", mlp.expr, consume = true }, - "->", - mlp.block }, - separators = "|", - terminators = "end" } - -mlp.stat:add{ name = "match statement", - "match", - mlp.expr_list, - "with", gg.optkeyword "|", - match_cases_list_parser, - "end", - builder = |x| match_builder{ x[1], x[3] } } +local function extend(M) + + local _M = gg.future(M) + + checks('metalua.compiler.parser') + 
M.lexer:add{ "match", "with", "->" } + M.block.terminators:add "|" + + local match_cases_list_parser = gg.list{ name = "match cases list", + gg.sequence{ name = "match case", + gg.list{ name = "match case patterns list", + primary = _M.expr_list, + separators = "|", + terminators = { "->", "if" } }, + gg.onkeyword{ "if", _M.expr, consume = true }, + "->", + _M.block }, + separators = "|", + terminators = "end" } + + M.stat:add{ name = "match statement", + "match", + _M.expr_list, + "with", gg.optkeyword "|", + match_cases_list_parser, + "end", + builder = |x| match_builder{ x[1], x[3] } } +end +return extend \ No newline at end of file diff --git a/src/lib/metalua/extension/xmatch.mlua b/metalua/extension/xmatch.mlua similarity index 89% rename from src/lib/metalua/extension/xmatch.mlua rename to metalua/extension/xmatch.mlua index 29dccd8..71fd0b0 100644 --- a/src/lib/metalua/extension/xmatch.mlua +++ b/metalua/extension/xmatch.mlua @@ -1,3 +1,21 @@ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- require 'metalua.extension.match' @@ -5,8 +23,6 @@ module ('spmatch', package.seeall) require 'metalua.walk.id' --{extension 'log'} - ---------------------------------------------------------------------- -- Back-end for statements -- "match function ..." 
and "local match function...". @@ -101,7 +117,7 @@ mlp.expr:add{ 'match', builder = |x| x[1], gg.multisequence{ local tested_term_seq, _, cases = unpack(x) local v = mlp.gensym 'match_expr' -- Replace expressions with blocks - for case in ivalues (cases) do + for _, case in ipairs (cases) do local body = case[3] case[3] = { `Set{ {v}, {body} } } end @@ -155,7 +171,7 @@ function bind (x) ------------------------------------------------------------------- local vars_not_in_pattern do vars_not_in_pattern = { } - for k in keys(vars) do + for k, _ in pairs(vars) do if not vars_in_pattern[k] then vars_not_in_pattern[k] = true end @@ -167,7 +183,7 @@ function bind (x) ------------------------------------------------------------------- if next(vars_not_in_pattern) then local loc = { } - for k in keys (vars_not_in_pattern) do + for k, _ in pairs(vars_not_in_pattern) do table.insert (loc, `Id{k}) end table.insert (code, 1, `Local{ loc, { } }) @@ -178,7 +194,7 @@ function bind (x) ------------------------------------------------------------------- local decl_list do decl_list = { } - for k in keys (vars_in_pattern) do + for k, _ in pairs(vars_in_pattern) do table.insert (decl_list, `Id{k}) end end diff --git a/src/compiler/gg.lua b/metalua/grammar/generator.lua similarity index 76% rename from src/compiler/gg.lua rename to metalua/grammar/generator.lua index baf098a..4633c6e 100644 --- a/src/compiler/gg.lua +++ b/metalua/grammar/generator.lua @@ -1,18 +1,29 @@ ----------------------------------------------------------------------- --- Metalua. +-------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. -- --- Summary: parser generator. Collection of higher order functors, --- which allow to build and combine parsers. Relies on a lexer --- that supports the same API as the one exposed in mll.lua. +-- All rights reserved. 
-- ----------------------------------------------------------------------- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html -- --- Copyright (c) 2006-2008, Fabien Fleutot . +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +-------------------------------------------------------------------------------- + +-------------------------------------------------------------------------------- -- --- This software is released under the MIT Licence, see licence.txt --- for details. +-- Summary: parser generator. Collection of higher order functors, +-- which allow to build and combine parsers. Relies on a lexer +-- that supports the same API as the one exposed in mll.lua. -- ----------------------------------------------------------------------- +-------------------------------------------------------------------------------- -------------------------------------------------------------------------------- -- @@ -26,48 +37,46 @@ -- * [gg.onkeyword()] -- * [gg.optkeyword()] -- --- Other functions: +-- Other functions: -- * [gg.parse_error()] -- * [gg.make_parser()] -- * [gg.is_parser()] -- -------------------------------------------------------------------------------- -module("gg", package.seeall) +local M = { } + +local lexer = require 'metalua.grammar.lexer' + +-------------------------------------------------------------------------------- +-- Symbol generator: [gensym()] returns a guaranteed-to-be-unique identifier. +-- The main purpose is to avoid variable capture in macros. 
+-- +-- If a string is passed as an argument, theis string will be part of the +-- id name (helpful for macro debugging) +-------------------------------------------------------------------------------- +local gensymidx = 0 + +function M.gensym (arg) + gensymidx = gensymidx + 1 + return { tag="Id", string.format(".%i.%s", gensymidx, arg or "")} +end + ------------------------------------------------------------------------------- -- parser metatable, which maps __call to method parse, and adds some -- error tracing boilerplate. ------------------------------------------------------------------------------- local parser_metatable = { } -function parser_metatable.__call (parser, lx, ...) - --printf ("Call parser %q of type %q", parser.name or "?", parser.kind) - if mlc.metabugs then - return parser:parse (lx, ...) - --local x = parser:parse (lx, ...) - --printf ("Result of parser %q: %s", - -- parser.name or "?", - -- _G.table.tostring(x, "nohash", 80)) - --return x - else - local li = lx:lineinfo_right() or { "?", "?", "?", "?" } - local status, ast = pcall (parser.parse, parser, lx, ...) - if status then return ast else - -- Try to replace the gg.lua location, in the error msg, with - -- the place where the current parser started handling the - -- lexstream. - -- Since the error is rethrown, these places are stacked. - error (string.format ("%s\n - (l.%s, c.%s, k.%s) in parser %s", - ast :strmatch "gg.lua:%d+: (.*)" or ast, - li[1], li[2], li[3], parser.name or parser.kind)) - end - end + +function parser_metatable :__call (lx, ...) + return self :parse (lx, ...) end ------------------------------------------------------------------------------- -- Turn a table into a parser, mainly by setting the metatable. 
------------------------------------------------------------------------------- -function make_parser(kind, p) +function M.make_parser(kind, p) p.kind = kind if not p.transformers then p.transformers = { } end function p.transformers:add (x) @@ -81,29 +90,32 @@ end -- Return true iff [x] is a parser. -- If it's a gg-generated parser, return the name of its kind. ------------------------------------------------------------------------------- -function is_parser (x) +function M.is_parser (x) return type(x)=="function" or getmetatable(x)==parser_metatable and x.kind end ------------------------------------------------------------------------------- --- Parse a sequence, without applying builder nor transformers +-- Parse a sequence, without applying builder nor transformers. ------------------------------------------------------------------------------- local function raw_parse_sequence (lx, p) - local r = { } - for i=1, #p do - e=p[i] - if type(e) == "string" then - if not lx:is_keyword (lx:next(), e) then - parse_error (lx, "A keyword was expected, probably `%s'.", e) end - elseif is_parser (e) then - table.insert (r, e (lx)) - else - gg.parse_error (lx,"Sequence `%s': element #%i is neither a string ".. 
- "nor a parser: %s", - p.name, i, table.tostring(e)) - end - end - return r + local r = { } + for i=1, #p do + local e=p[i] + if type(e) == "string" then + local kw = lx :next() + if not lx :is_keyword (kw, e) then + M.parse_error( + lx, "A keyword was expected, probably `%s'.", e) + end + elseif M.is_parser (e) then + table.insert (r, e(lx)) + else -- Invalid parser definition, this is *not* a parsing error + error(string.format( + "Sequence `%s': element #%i is neither a string nor a parser: %s", + p.name, i, table.tostring(e))) + end + end + return r end ------------------------------------------------------------------------------- @@ -124,10 +136,10 @@ local function transform (ast, parser, fli, lli) if parser.transformers then for _, t in ipairs (parser.transformers) do ast = t(ast) or ast end end - if type(ast) == 'table'then + if type(ast) == 'table' then local ali = ast.lineinfo if not ali or ali.first~=fli or ali.last~=lli then - ast.lineinfo = { first = fli, last = lli } + ast.lineinfo = lexer.new_lineinfo(fli, lli) end end return ast @@ -136,21 +148,32 @@ end ------------------------------------------------------------------------------- -- Generate a tracable parsing error (not implemented yet) ------------------------------------------------------------------------------- -function parse_error(lx, fmt, ...) - local li = lx:lineinfo_left() or {-1,-1,-1, ""} - local msg = string.format("line %i, char %i: "..fmt, li[1], li[2], ...) +function M.parse_error(lx, fmt, ...) + local li = lx:lineinfo_left() + local file, line, column, offset, positions + if li then + file, line, column, offset = li.source, li.line, li.column, li.offset + positions = { first = li, last = li } + else + line, column, offset = -1, -1, -1 + end + + local msg = string.format("line %i, char %i: "..fmt, line, column, ...) + if file and file~='?' 
then msg = "file "..file..", "..msg end + local src = lx.src - if li[3]>0 and src then - local i, j = li[3], li[3] + if offset>0 and src then + local i, j = offset, offset while src:sub(i,i) ~= '\n' and i>=0 do i=i-1 end - while src:sub(j,j) ~= '\n' and j<=#src do j=j+1 end + while src:sub(j,j) ~= '\n' and j<=#src do j=j+1 end local srcline = src:sub (i+1, j-1) - local idx = string.rep (" ", li[2]).."^" + local idx = string.rep (" ", column).."^" msg = string.format("%s\n>>> %s\n>>> %s", msg, srcline, idx) end + --lx :kill() error(msg) end - + ------------------------------------------------------------------------------- -- -- Sequence parser generator @@ -169,7 +192,7 @@ end -- * [transformers]: a list of AST->AST functions, applied in order on ASTs -- returned by the parser. -- --- * Table-part entries corresponds to keywords (strings) and subparsers +-- * Table-part entries corresponds to keywords (strings) and subparsers -- (function and callable objects). -- -- After creation, the following fields are added: @@ -178,13 +201,14 @@ end -- * [name] is set, if it wasn't in the input. -- ------------------------------------------------------------------------------- -function sequence (p) - make_parser ("sequence", p) +function M.sequence (p) + M.make_parser ("sequence", p) ------------------------------------------------------------------- -- Parsing method ------------------------------------------------------------------- function p:parse (lx) + -- Raw parsing: local fli = lx:lineinfo_right() local seq = raw_parse_sequence (lx, self) @@ -213,7 +237,7 @@ function sequence (p) p.name = p[1] .. " ... " .. p[#p] else p.name = p[1] .. " ..." 
end else -- can't find a decent name - p.name = "" + p.name = "unnamed_sequence" end return p @@ -258,52 +282,50 @@ end -- -- * [kind] == "multisequence" -- ------------------------------------------------------------------------------- -function multisequence (p) - make_parser ("multisequence", p) +function M.multisequence (p) + M.make_parser ("multisequence", p) ------------------------------------------------------------------- -- Add a sequence (might be just a config table for [gg.sequence]) ------------------------------------------------------------------- - function p:add (s) + function p :add (s) -- compile if necessary: local keyword = type(s)=='table' and s[1] - if type(s)=='table' and not is_parser(s) then sequence(s) end - if is_parser(s)~='sequence' or type(keyword)~='string' then + if type(s)=='table' and not M.is_parser(s) then M.sequence(s) end + if M.is_parser(s)~='sequence' or type(keyword)~='string' then if self.default then -- two defaults error ("In a multisequence parser, all but one sequences ".. "must start with a keyword") else self.default = s end -- first default - elseif self.sequences[keyword] then -- duplicate keyword - eprintf (" *** Warning: keyword %q overloaded in multisequence ***", - keyword) - self.sequences[keyword] = s - else -- newly caught keyword + else + if self.sequences[keyword] then -- duplicate keyword + -- TODO: warn that initial keyword `keyword` is overloaded in multiseq + end self.sequences[keyword] = s - end + end end -- ------------------------------------------------------------------- -- Get the sequence starting with this keyword. 
[kw :: string] ------------------------------------------------------------------- - function p:get (kw) return self.sequences [kw] end + function p :get (kw) return self.sequences [kw] end ------------------------------------------------------------------- -- Remove the sequence starting with keyword [kw :: string] ------------------------------------------------------------------- - function p:del (kw) - if not self.sequences[kw] then - eprintf("*** Warning: trying to delete sequence starting ".. - "with %q from a multisequence having no such ".. - "entry ***", kw) end + function p :del (kw) + if not self.sequences[kw] then + -- TODO: warn that we try to delete a non-existent entry + end local removed = self.sequences[kw] - self.sequences[kw] = nil + self.sequences[kw] = nil return removed end ------------------------------------------------------------------- -- Parsing method ------------------------------------------------------------------- - function p:parse (lx) + function p :parse (lx) local fli = lx:lineinfo_right() local x = raw_parse_multisequence (lx, self.sequences, self.default) local lli = lx:lineinfo_left() @@ -317,7 +339,7 @@ function multisequence (p) -- from the array part of the parser to the hash part of field -- [sequences] p.sequences = { } - for i=1, #p do p:add (p[i]); p[i] = nil end + for i=1, #p do p :add (p[i]); p[i] = nil end -- FIXME: why is this commented out? --if p.default and not is_parser(p.default) then sequence(p.default) end @@ -342,9 +364,9 @@ end -- -- * the builder takes specific parameters: -- - for [prefix], it takes the result of the prefix sequence parser, -- and the prefixed expression --- - for [infix], it takes the left-hand-side expression, the results +-- - for [infix], it takes the left-hand-side expression, the results -- of the infix sequence parser, and the right-hand-side expression. 
--- - for [suffix], it takes the suffixed expression, and theresult +-- - for [suffix], it takes the suffixed expression, and the result -- of the suffix sequence parser. -- -- * the default field is a list, with parameters: @@ -357,7 +379,7 @@ end -- -- In [p], useful fields are: -- * [transformers]: as usual -- * [name]: as usual --- * [primary]: the atomic expression parser, or a multisequence config +-- * [primary]: the atomic expression parser, or a multisequence config -- table (mandatory) -- * [prefix]: prefix operators config table, see above. -- * [infix]: infix operators config table, see above. @@ -366,12 +388,12 @@ end -- -- After creation, these fields are added: -- * [kind] == "expr" -- * [parse] as usual --- * each table is turned into a multisequence, and therefore has an +-- * each table is turned into a multisequence, and therefore has an -- [add] method -- ------------------------------------------------------------------------------- -function expr (p) - make_parser ("expr", p) +function M.expr (p) + M.make_parser ("expr", p) ------------------------------------------------------------------- -- parser method. @@ -379,7 +401,7 @@ function expr (p) -- it won't read expressions whose precedence is lower or equal -- to [prec]. ------------------------------------------------------------------- - function p:parse (lx, prec) + function p :parse (lx, prec) prec = prec or 0 ------------------------------------------------------ @@ -388,7 +410,7 @@ function expr (p) -- Options include prec, assoc, transformers. ------------------------------------------------------ local function get_parser_info (tab) - local p2 = tab:get (lx:is_keyword (lx:peek())) + local p2 = tab :get (lx :is_keyword (lx :peek())) if p2 then -- keyword-based sequence found local function parser(lx) return raw_parse_sequence(lx, p2) end return parser, p2 @@ -406,17 +428,17 @@ function expr (p) -- expr, and one for the one with the prefix op. 
------------------------------------------------------ local function handle_prefix () - local fli = lx:lineinfo_right() + local fli = lx :lineinfo_right() local p2_func, p2 = get_parser_info (self.prefix) local op = p2_func and p2_func (lx) if op then -- Keyword-based sequence found - local ili = lx:lineinfo_right() -- Intermediate LineInfo - local e = p2.builder (op, self:parse (lx, p2.prec)) - local lli = lx:lineinfo_left() + local ili = lx :lineinfo_right() -- Intermediate LineInfo + local e = p2.builder (op, self :parse (lx, p2.prec)) + local lli = lx :lineinfo_left() return transform (transform (e, p2, ili, lli), self, fli, lli) - else -- No prefix found, get a primary expression + else -- No prefix found, get a primary expression local e = self.primary(lx) - local lli = lx:lineinfo_left() + local lli = lx :lineinfo_left() return transform (e, self, fli, lli) end end -- @@ -432,7 +454,7 @@ function expr (p) ----------------------------------------- -- Handle flattening operators: gather all operands - -- of the series in [list]; when a different operator + -- of the series in [list]; when a different operator -- is found, stop, build from [list], [transform] and -- return. ----------------------------------------- @@ -449,13 +471,13 @@ function expr (p) local e2 = pflat.builder (list) local lli = lx:lineinfo_left() return transform (transform (e2, pflat, fli, lli), self, fli, lli) - + ----------------------------------------- -- Handle regular infix operators: [e] the LHS is known, -- just gather the operator and [e2] the RHS. -- Result goes in [e3]. 
----------------------------------------- - elseif p2.prec and p2.prec>prec or + elseif p2.prec and p2.prec>prec or p2.prec==prec and p2.assoc=="right" then local fli = e.lineinfo.first -- lx:lineinfo_right() local op = p2_func(lx) @@ -466,10 +488,10 @@ function expr (p) return transform (transform (e3, p2, fli, lli), self, fli, lli) ----------------------------------------- - -- Check for non-associative operators, and complain if applicable. + -- Check for non-associative operators, and complain if applicable. ----------------------------------------- elseif p2.assoc=="none" and p2.prec==prec then - parse_error (lx, "non-associative operator!") + M.parse_error (lx, "non-associative operator!") ----------------------------------------- -- No infix operator suitable at that precedence @@ -501,7 +523,7 @@ function expr (p) end -- ------------------------------------------------------ - -- Parser body: read suffix and (infix+operand) + -- Parser body: read suffix and (infix+operand) -- extensions as long as we're able to fetch more at -- this precedence level. ------------------------------------------------------ @@ -521,7 +543,7 @@ function expr (p) if not p.primary then p.primary=p[1]; p[1]=nil end for _, t in ipairs{ "primary", "prefix", "infix", "suffix" } do if not p[t] then p[t] = { } end - if not is_parser(p[t]) then multisequence(p[t]) end + if not M.is_parser(p[t]) then M.multisequence(p[t]) end end function p:add(...) return self.primary:add(...) 
end return p @@ -558,40 +580,43 @@ end -- -- * [kind] == "list" -- ------------------------------------------------------------------------------- -function list (p) - make_parser ("list", p) +function M.list (p) + M.make_parser ("list", p) ------------------------------------------------------------------- -- Parsing method ------------------------------------------------------------------- - function p:parse (lx) + function p :parse (lx) ------------------------------------------------------ - -- Used to quickly check whether there's a terminator + -- Used to quickly check whether there's a terminator -- or a separator immediately ahead ------------------------------------------------------ - local function peek_is_in (keywords) + local function peek_is_in (keywords) return keywords and lx:is_keyword(lx:peek(), unpack(keywords)) end local x = { } - local fli = lx:lineinfo_right() + local fli = lx :lineinfo_right() -- if there's a terminator to start with, don't bother trying - if not peek_is_in (self.terminators) then - repeat table.insert (x, self.primary (lx)) -- read one element + local is_empty_list = self.terminators and (peek_is_in (self.terminators) or lx:peek().tag=="Eof") + if not is_empty_list then + repeat + local item = self.primary(lx) + table.insert (x, item) -- read one element until - -- First reason to stop: There's a separator list specified, - -- and next token isn't one. Otherwise, consume it with [lx:next()] + -- There's a separator list specified, and next token isn't in it. + -- Otherwise, consume it with [lx:next()] self.separators and not(peek_is_in (self.separators) and lx:next()) or - -- Other reason to stop: terminator token ahead + -- Terminator token ahead peek_is_in (self.terminators) or -- Last reason: end of file reached lx:peek().tag=="Eof" end local lli = lx:lineinfo_left() - - -- Apply the builder. It can be a string, or a callable value, + + -- Apply the builder. It can be a string, or a callable value, -- or simply nothing. 
local b = self.builder if b then @@ -620,10 +645,10 @@ end -- ------------------------------------------------------------------------------- -- --- Keyword-conditionned parser generator +-- Keyword-conditioned parser generator -- ------------------------------------------------------------------------------- --- +-- -- Only apply a parser if a given keyword is found. The result of -- [gg.onkeyword] parser is the result of the subparser (modulo -- [transformers] applications). @@ -639,10 +664,10 @@ end -- -- -- * [transformers]: as usual -- --- * [peek]: if non-nil, the conditionning keyword is left in the lexeme +-- * [peek]: if non-nil, the conditioning keyword is left in the lexeme -- stream instead of being consumed. -- --- * [primary]: the subparser. +-- * [primary]: the subparser. -- -- * [keywords]: list of strings representing triggering keywords. -- @@ -650,26 +675,27 @@ end -- -- Strings are put in [keywords], and the parser is put in [primary]. -- -- After the call, the following fields will be set: --- +-- -- * [parse] the parsing method -- * [kind] == "onkeyword" -- * [primary] -- * [keywords] -- ------------------------------------------------------------------------------- -function onkeyword (p) - make_parser ("onkeyword", p) +function M.onkeyword (p) + M.make_parser ("onkeyword", p) ------------------------------------------------------------------- -- Parsing method ------------------------------------------------------------------- - function p:parse(lx) - if lx:is_keyword (lx:peek(), unpack(self.keywords)) then - --local fli = lx:lineinfo_right() + function p :parse (lx) + if lx :is_keyword (lx:peek(), unpack(self.keywords)) then + local fli = lx:lineinfo_right() if not self.peek then lx:next() end local content = self.primary (lx) - --local lli = lx:lineinfo_left() - local fli, lli = content.lineinfo.first, content.lineinfo.last + local lli = lx:lineinfo_left() + local li = content.lineinfo or { } + fli, lli = li.first or fli, li.last or lli 
return transform (content, p, fli, lli) else return false end end @@ -680,10 +706,9 @@ function onkeyword (p) if not p.keywords then p.keywords = { } end for _, x in ipairs(p) do if type(x)=="string" then table.insert (p.keywords, x) - else assert (not p.primary and is_parser (x)); p.primary = x end + else assert (not p.primary and M.is_parser (x)); p.primary = x end end - if not next (p.keywords) then - eprintf("Warning, no keyword to trigger gg.onkeyword") end + assert (next (p.keywords), "Missing trigger keyword in gg.onkeyword") assert (p.primary, 'no primary parser in gg.onkeyword') return p end -- @@ -696,15 +721,15 @@ end -- ------------------------------------------------------------------------------- -- -- This doesn't return a real parser, just a function. That function parses --- one of the keywords passed as parameters, and returns it. It returns +-- one of the keywords passed as parameters, and returns it. It returns -- [false] if no matching keyword is found. -- -- Notice that tokens returned by lexer already carry lineinfo, therefore -- there's no need to add them, as done usually through transform() calls. ------------------------------------------------------------------------------- -function optkeyword (...) +function M.optkeyword (...) local args = {...} - if type (args[1]) == "table" then + if type (args[1]) == "table" then assert (#args == 1) args = args[1] end @@ -729,15 +754,15 @@ end -- The resulting parser returns whatever the argument parser does. 
-- ------------------------------------------------------------------------------- -function with_lexer(new_lexer, parser) +function M.with_lexer(new_lexer, parser) ------------------------------------------------------------------- - -- Most gg functions take their parameters in a table, so it's + -- Most gg functions take their parameters in a table, so it's -- better to silently accept when with_lexer{ } is called with -- its arguments in a list: ------------------------------------------------------------------- if not parser and #new_lexer==2 and type(new_lexer[1])=='table' then - return with_lexer(unpack(new_lexer)) + return M.with_lexer(unpack(new_lexer)) end ------------------------------------------------------------------- @@ -754,3 +779,54 @@ function with_lexer(new_lexer, parser) if status then return result else error(result) end end end + +-------------------------------------------------------------------------------- +-- +-- Make sure a parser is used and returns successfully. +-- +-------------------------------------------------------------------------------- +function M.nonempty(primary) + local p = M.make_parser('non-empty list', { primary = primary, name=primary.name }) + function p :parse (lx) + local fli = lx:lineinfo_right() + local content = self.primary (lx) + local lli = lx:lineinfo_left() + local li = content.lineinfo or { } + fli, lli = li.first or fli, li.last or lli + if #content == 0 then + M.parse_error (lx, "`%s' must not be empty.", self.name or "list") + else + return transform (content, self, fli, lli) + end + end + return p +end + +local FUTURE_MT = { } +function FUTURE_MT:__tostring() return "" end +function FUTURE_MT:__newindex(key, value) error "don't write in futures" end +function FUTURE_MT :__index (parser_name) + return function(...) 
+ local p, m = rawget(self, '__path'), self.__module + if p then for _, name in ipairs(p) do + m=rawget(m, name) + if not m then error ("Submodule '"..name.."' undefined") end + end end + local f = rawget(m, parser_name) + if not f then error ("Parser '"..parser_name.."' undefined") end + return f(...) + end +end + +function M.future(module, ...) + checks('table') + local path = ... and {...} + if path then for _, x in ipairs(path) do + assert(type(x)=='string', "Bad future arg") + end end + local self = { __module = module, + __path = path } + return setmetatable(self, FUTURE_MT) +end + +return M diff --git a/metalua/grammar/lexer.lua b/metalua/grammar/lexer.lua new file mode 100644 index 0000000..0a58058 --- /dev/null +++ b/metalua/grammar/lexer.lua @@ -0,0 +1,672 @@ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- + +require 'checks' + +local M = { } + +local lexer = { alpha={ }, sym={ } } +lexer.__index=lexer +lexer.__type='lexer.stream' + +M.lexer = lexer + + +local debugf = function() end +-- local debugf=printf + +---------------------------------------------------------------------- +-- Some locale settings produce bad results, e.g. French locale +-- expect float numbers to use commas instead of periods. 
+-- TODO: change number parser into something loclae-independent, +-- locales are nasty. +---------------------------------------------------------------------- +os.setlocale('C') + +local MT = { } + +M.metatables=MT + +---------------------------------------------------------------------- +-- Create a new metatable, for a new class of objects. +---------------------------------------------------------------------- +local function new_metatable(name) + local mt = { __type = 'lexer.'..name }; + mt.__index = mt + MT[name] = mt +end + + +---------------------------------------------------------------------- +-- Position: represent a point in a source file. +---------------------------------------------------------------------- +new_metatable 'position' + +local position_idx=1 + +function M.new_position(line, column, offset, source) + checks('number', 'number', 'number', 'string') + local id = position_idx; position_idx = position_idx+1 + return setmetatable({line=line, column=column, offset=offset, + source=source, id=id}, MT.position) +end + +function MT.position :__tostring() + return string.format("<%s%s|L%d|C%d|K%d>", + self.comments and "C|" or "", + self.source, self.line, self.column, self.offset) +end + + + +---------------------------------------------------------------------- +-- Position factory: convert offsets into line/column/offset positions. 
+---------------------------------------------------------------------- +new_metatable 'position_factory' + +function M.new_position_factory(src, src_name) + -- assert(type(src)=='string') + -- assert(type(src_name)=='string') + local lines = { 1 } + for offset in src :gmatch '\n()' do table.insert(lines, offset) end + local max = #src+1 + table.insert(lines, max+1) -- +1 includes Eof + return setmetatable({ src_name=src_name, line2offset=lines, max=max }, + MT.position_factory) +end + +function MT.position_factory :get_position (offset) + -- assert(type(offset)=='number') + assert(offset<=self.max) + local line2offset = self.line2offset + local left = self.last_left or 1 + if offset", + fli.comments and "C|" or "", + fli.source, line, column, offset, + lli.comments and "|C" or "") +end + +---------------------------------------------------------------------- +-- Token: atomic Lua language element, with a category, a content, +-- and some lineinfo relating it to its original source. +---------------------------------------------------------------------- +new_metatable 'token' + +function M.new_token(tag, content, lineinfo) + --printf("TOKEN `%s{ %q, lineinfo = %s} boundaries %d, %d", + -- tag, content, tostring(lineinfo), lineinfo.first.id, lineinfo.last.id) + return setmetatable({tag=tag, lineinfo=lineinfo, content}, MT.token) +end + +function MT.token :__tostring() + --return string.format("`%s{ %q, %s }", self.tag, self[1], tostring(self.lineinfo)) + return string.format("`%s %q", self.tag, self[1]) +end + + +---------------------------------------------------------------------- +-- Comment: series of comment blocks with associated lineinfo. +-- To be attached to the tokens just before and just after them. 
+---------------------------------------------------------------------- +new_metatable 'comment' + +function M.new_comment(lines) + local first = lines[1].lineinfo.first + local last = lines[#lines].lineinfo.last + local lineinfo = M.new_lineinfo(first, last) + return setmetatable({lineinfo=lineinfo, unpack(lines)}, MT.comment) +end + +function MT.comment :text() + local last_line = self[1].lineinfo.last.line + local acc = { } + for i, line in ipairs(self) do + local nreturns = line.lineinfo.first.line - last_line + table.insert(acc, ("\n"):rep(nreturns)) + table.insert(acc, line[1]) + end + return table.concat(acc) +end + +function M.new_comment_line(text, lineinfo, nequals) + checks('string', 'lexer.lineinfo', '?number') + return { lineinfo = lineinfo, text, nequals } +end + + + +---------------------------------------------------------------------- +-- Patterns used by [lexer :extract] to decompose the raw string into +-- correctly tagged tokens. +---------------------------------------------------------------------- +lexer.patterns = { + spaces = "^[ \r\n\t]*()", + short_comment = "^%-%-([^\n]*)\n?()", + --final_short_comment = "^%-%-([^\n]*)()$", + long_comment = "^%-%-%[(=*)%[\n?(.-)%]%1%]()", + long_string = "^%[(=*)%[\n?(.-)%]%1%]()", + number_mantissa = { "^%d+%.?%d*()", "^%d*%.%d+()" }, + number_mantissa_hex = { "^%x+%.?%x*()", "^%x*%.%x+()" }, --Lua5.1 and Lua5.2 + number_exponant = "^[eE][%+%-]?%d+()", + number_exponant_hex = "^[pP][%+%-]?%d+()", --Lua5.2 + number_hex = "^0[xX]()", + word = "^([%a_][%w_]*)()" +} + +---------------------------------------------------------------------- +-- unescape a whole string, applying [unesc_digits] and +-- [unesc_letter] as many times as required. +---------------------------------------------------------------------- +local function unescape_string (s) + + -- Turn the digits of an escape sequence into the corresponding + -- character, e.g. [unesc_digits("123") == string.char(123)]. 
+ local function unesc_digits (backslashes, digits) + if #backslashes%2==0 then + -- Even number of backslashes, they escape each other, not the digits. + -- Return them so that unesc_letter() can treat them + return backslashes..digits + else + -- Remove the odd backslash, which escapes the number sequence. + -- The rest will be returned and parsed by unesc_letter() + backslashes = backslashes :sub (1,-2) + end + local k, j, i = digits :reverse() :byte(1, 3) + local z = string.byte "0" + local code = (k or z) + 10*(j or z) + 100*(i or z) - 111*z + if code > 255 then + error ("Illegal escape sequence '\\"..digits.. + "' in string: ASCII codes must be in [0..255]") + end + local c = string.char (code) + if c == '\\' then c = '\\\\' end -- parsed by unesc_letter (test: "\092b" --> "\\b") + return backslashes..c + end + + -- Turn hex digits of escape sequence into char. + local function unesc_hex(backslashes, digits) + if #backslashes%2==0 then + return backslashes..'x'..digits + else + backslashes = backslashes :sub (1,-2) + end + local c = string.char(tonumber(digits,16)) + if c == '\\' then c = '\\\\' end -- parsed by unesc_letter (test: "\x5cb" --> "\\b") + return backslashes..c + end + + -- Handle Lua 5.2 \z sequences + local function unesc_z(backslashes, more) + if #backslashes%2==0 then + return backslashes..more + else + return backslashes :sub (1,-2) + end + end + + -- Take a letter [x], and returns the character represented by the + -- sequence ['\\'..x], e.g. [unesc_letter "n" == "\n"]. 
+ local function unesc_letter(x) + local t = { + a = "\a", b = "\b", f = "\f", + n = "\n", r = "\r", t = "\t", v = "\v", + ["\\"] = "\\", ["'"] = "'", ['"'] = '"', ["\n"] = "\n" } + return t[x] or x + end + + s = s: gsub ("(\\+)(z%s*)", unesc_z) -- Lua 5.2 + s = s: gsub ("(\\+)([0-9][0-9]?[0-9]?)", unesc_digits) + s = s: gsub ("(\\+)x([0-9a-fA-F][0-9a-fA-F])", unesc_hex) -- Lua 5.2 + s = s: gsub ("\\(%D)",unesc_letter) + return s +end + +lexer.extractors = { + "extract_long_comment", "extract_short_comment", + "extract_short_string", "extract_word", "extract_number", + "extract_long_string", "extract_symbol" } + + + +---------------------------------------------------------------------- +-- Really extract next token from the raw string +-- (and update the index). +-- loc: offset of the position just after spaces and comments +-- previous_i: offset in src before extraction began +---------------------------------------------------------------------- +function lexer :extract () + local attached_comments = { } + local function gen_token(...) + local token = M.new_token(...) + if #attached_comments>0 then -- attach previous comments to token + local comments = M.new_comment(attached_comments) + token.lineinfo.first.comments = comments + if self.lineinfo_last_extracted then + self.lineinfo_last_extracted.comments = comments + end + attached_comments = { } + end + token.lineinfo.first.facing = self.lineinfo_last_extracted + self.lineinfo_last_extracted.facing = assert(token.lineinfo.first) + self.lineinfo_last_extracted = assert(token.lineinfo.last) + return token + end + while true do -- loop until a non-comment token is found + + -- skip whitespaces + self.i = self.src:match (self.patterns.spaces, self.i) + if self.i>#self.src then + local fli = self.posfact :get_position (#self.src+1) + local lli = self.posfact :get_position (#self.src+1) -- ok? 
+ local tok = gen_token("Eof", "eof", M.new_lineinfo(fli, lli)) + tok.lineinfo.last.facing = lli + return tok + end + local i_first = self.i -- loc = position after whitespaces + + -- try every extractor until a token is found + for _, extractor in ipairs(self.extractors) do + local tag, content, xtra = self [extractor] (self) + if tag then + local fli = self.posfact :get_position (i_first) + local lli = self.posfact :get_position (self.i-1) + local lineinfo = M.new_lineinfo(fli, lli) + if tag=='Comment' then + local prev_comment = attached_comments[#attached_comments] + if not xtra -- new comment is short + and prev_comment and not prev_comment[2] -- prev comment is short + and prev_comment.lineinfo.last.line+1==fli.line then -- adjascent lines + -- concat with previous comment + prev_comment[1] = prev_comment[1].."\n"..content -- TODO quadratic, BAD! + prev_comment.lineinfo.last = lli + else -- accumulate comment + local comment = M.new_comment_line(content, lineinfo, xtra) + table.insert(attached_comments, comment) + end + break -- back to skipping spaces + else -- not a comment: real token, then + return gen_token(tag, content, lineinfo) + end -- if token is a comment + end -- if token found + end -- for each extractor + end -- while token is a comment +end -- :extract() + + + + +---------------------------------------------------------------------- +-- Extract a short comment. +---------------------------------------------------------------------- +function lexer :extract_short_comment() + -- TODO: handle final_short_comment + local content, j = self.src :match (self.patterns.short_comment, self.i) + if content then self.i=j; return 'Comment', content, nil end +end + +---------------------------------------------------------------------- +-- Extract a long comment. 
----------------------------------------------------------------------
-- Extract a long comment --[==[ ... ]==].
-- Returns tag 'Comment', its content, and the number of '=' signs in
-- the opening bracket (distinguishes long from short comments).
----------------------------------------------------------------------
function lexer :extract_long_comment()
    local equals, content, j = self.src:match (self.patterns.long_comment, self.i)
    if j then self.i = j; return "Comment", content, #equals end
end

----------------------------------------------------------------------
-- Extract a '...' or "..." short string.
-- Scans for the next backslash, CR/LF or closing quote; unescaping of
-- the content is delegated to unescape_string().
----------------------------------------------------------------------
function lexer :extract_short_string()
    local k = self.src :sub (self.i,self.i) -- first char
    if k~=[[']] and k~=[["]] then return end -- no match'
    local i = self.i + 1
    local j = i
    while true do
        local x,y; x, j, y = self.src :match ("([\\\r\n"..k.."])()(.?)", j) -- next interesting char
        if x == '\\' then
            if y == 'z' then -- Lua 5.2 \z
                j = self.src :match ("^%s*()", j+1)
            else
                j=j+1 -- escaped char
            end
        elseif x == k then break -- end of string
        else
            -- a raw newline, or no terminator at all: string never closed
            assert (not x or x=='\r' or x=='\n')
            return nil, 'Unterminated string'
        end
    end
    self.i = j

    return 'String', unescape_string (self.src :sub (i,j-2))
end

----------------------------------------------------------------------
-- Extract Id or Keyword: an identifier-shaped word is a 'Keyword'
-- token iff it has been registered in self.alpha (via :add()).
----------------------------------------------------------------------
function lexer :extract_word()
    local word, j = self.src:match (self.patterns.word, self.i)
    if word then
        self.i = j
        return (self.alpha [word] and 'Keyword' or 'Id'), word
    end
end

----------------------------------------------------------------------
-- Extract Number.
----------------------------------------------------------------------
-- Try hexadecimal first (prefix pattern), then decimal; each form is
-- mantissa (with or without fractional part) plus optional exponent.
-- The matched slice is converted with tonumber().
----------------------------------------------------------------------
function lexer :extract_number()
    local j = self.src:match(self.patterns.number_hex, self.i)
    if j then
        j = self.src:match (self.patterns.number_mantissa_hex[1], j) or
            self.src:match (self.patterns.number_mantissa_hex[2], j)
        if j then
            j = self.src:match (self.patterns.number_exponant_hex, j) or j
        end
    else
        j = self.src:match (self.patterns.number_mantissa[1], self.i) or
            self.src:match (self.patterns.number_mantissa[2], self.i)
        if j then
            j = self.src:match (self.patterns.number_exponant, j) or j
        end
    end
    if not j then return end
    -- Number found, interpret with tonumber() and return it
    local str = self.src:sub (self.i, j-1)
    -- :TODO: tonumber on Lua5.2 floating hex may or may not work on Lua5.1
    local n = tonumber (str)
    if not n then error(str.." is not a valid number according to tonumber()") end
    self.i = j
    return 'Number', n
end

----------------------------------------------------------------------
-- Extract long string [==[ ... ]==]; content is returned verbatim
-- (no escape processing inside long strings).
----------------------------------------------------------------------
function lexer :extract_long_string()
    local _, content, j = self.src :match (self.patterns.long_string, self.i)
    if j then self.i = j; return 'String', content end
end

----------------------------------------------------------------------
-- Extract symbol.
----------------------------------------------------------------------
-- Extract a symbolic (punctuation) token.
-- Multi-char symbols starting with the current char are looked up in
-- self.sym; the LONGEST matching registered symbol wins, so that e.g.
-- '...' is never mis-lexed as '..' followed by '.' regardless of the
-- order in which the symbols were registered with :add().
-- (The previous version returned the first registered match, which
-- depended on registration order.)
-- Falls back to the single character as a 'Keyword' token.
----------------------------------------------------------------------
function lexer :extract_symbol()
    local k = self.src:sub (self.i,self.i)
    local symk = self.sym [k] -- symbols starting with `k`
    if not symk then
        self.i = self.i + 1
        return 'Keyword', k
    end
    local best -- longest registered symbol matching at self.i
    for _, sym in pairs (symk) do
        if (not best or #sym > #best)
        and sym == self.src:sub (self.i, self.i + #sym - 1) then
            best = sym
        end
    end
    if best then
        self.i = self.i + #best
        return 'Keyword', best
    end
    self.i = self.i+1
    return 'Keyword', k
end

----------------------------------------------------------------------
-- Add a keyword to the list of keywords recognized by the lexer.
-- Accepts a single string or a list of strings. Identifier-shaped
-- words go in self.alpha; multi-char punctuation goes in self.sym
-- (indexed by first char); single punctuation chars are always
-- recognized and need no registration.
----------------------------------------------------------------------
function lexer :add (w, ...)
    assert(not ..., "lexer :add() takes only one arg, although possibly a table")
    if type (w) == "table" then
        for _, x in ipairs (w) do self :add (x) end
    else
        if w:match (self.patterns.word .. "$") then self.alpha [w] = true
        elseif w:match "^%p%p+$" then
            local k = w:sub(1,1)
            local list = self.sym [k]
            if not list then list = { }; self.sym [k] = list end
            table.insert (list, w)
        elseif w:match "^%p$" then return
        else error "Invalid keyword" end
    end
end

----------------------------------------------------------------------
-- Return the [n]th next token, without consuming it.
-- [n] defaults to 1. If it goes past the end of the stream, an EOF
-- token is returned. Extracted-but-unconsumed tokens are buffered
-- in self.peeked.
----------------------------------------------------------------------
function lexer :peek (n)
    if not n then n=1 end
    if n > #self.peeked then
        for i = #self.peeked+1, n do
            self.peeked [i] = self :extract()
        end
    end
    return self.peeked [n]
end

----------------------------------------------------------------------
-- Return the [n]th next token, removing it as well as the 0..n-1
-- previous tokens. [n] defaults to 1. If it goes past the end of the
-- stream, an EOF token is returned.
----------------------------------------------------------------------
function lexer :next (n)
    n = n or 1
    self :peek (n) -- make sure n tokens are buffered
    local a
    for i=1,n do
        a = table.remove (self.peeked, 1)
        -- TODO: is this used anywhere? I think not. a.lineinfo.last may be nil.
        --self.lastline = a.lineinfo.last.line
    end
    self.lineinfo_last_consumed = a.lineinfo.last
    return a
end

----------------------------------------------------------------------
-- Returns an object which saves the stream's current state.
----------------------------------------------------------------------
-- FIXME there are more fields than that to save
function lexer :save () return { self.i; {unpack(self.peeked) } } end

----------------------------------------------------------------------
-- Restore the stream's state, as saved by method [save].
----------------------------------------------------------------------
-- FIXME there are more fields than that to restore
function lexer :restore (s) self.i=s[1]; self.peeked=s[2] end

----------------------------------------------------------------------
-- Resynchronize: cancel any token in self.peeked, by emptying the
-- list and resetting the indexes. Rewinds self.i to the start of the
-- first peeked token (or of its leading comments, if any).
----------------------------------------------------------------------
function lexer :sync()
    local p1 = self.peeked[1]
    if p1 then
        local li_first = p1.lineinfo.first
        if li_first.comments then li_first=li_first.comments.lineinfo.first end
        self.i = li_first.offset
        self.column_offset = self.i - li_first.column
        self.peeked = { }
        self.attached_comments = p1.lineinfo.first.comments or { }
    end
end

----------------------------------------------------------------------
-- Take the source and offset of an old lexer.
----------------------------------------------------------------------
function lexer :takeover(old)
    self :sync(); old :sync() -- flush both peek buffers first
    for _, field in ipairs{ 'i', 'src', 'attached_comments', 'posfact' } do
        self[field] = old[field]
    end
    return self
end

----------------------------------------------------------------------
-- Return the current position in the sources. This position is between
-- two tokens, and can be within a space / comment area, and therefore
-- have a non-null width. :lineinfo_left() returns the beginning of the
-- separation area, :lineinfo_right() returns the end of that area.
--
--     ____ last consumed token    ____ first unconsumed token
--    /                           /
-- XXXXX  YYYYY
--      \____                \____
--           :lineinfo_left()     :lineinfo_right()
----------------------------------------------------------------------
function lexer :lineinfo_right()
    return self :peek(1).lineinfo.first
end

function lexer :lineinfo_left()
    return self.lineinfo_last_consumed
end

----------------------------------------------------------------------
-- Create a new lexstream, either from a source string or by taking
-- over an existing stream.
----------------------------------------------------------------------
function lexer :newstream (src_or_stream, name)
    name = name or "?"
    if type(src_or_stream)=='table' then -- it's a stream
        return setmetatable ({ }, self) :takeover (src_or_stream)
    elseif type(src_or_stream)=='string' then -- it's a source string
        local src = src_or_stream
        local pos1 = M.new_position(1, 1, 1, name)
        local stream = {
            src_name = name; -- Name of the file
            src = src; -- The source, as a single string
            peeked = { }; -- Already peeked, but not discarded yet, tokens
            i = 1; -- Character offset in src
            attached_comments = { },-- comments accumulator
            lineinfo_last_extracted = pos1,
            lineinfo_last_consumed = pos1,
            posfact = M.new_position_factory (src_or_stream, name)
        }
        setmetatable (stream, self)

        -- Skip initial sharp-bang for Unix scripts
        -- FIXME: redundant with mlp.chunk()
        if src and src :match "^#!" then
            local endofline = src :find "\n"
            stream.i = endofline and (endofline + 1) or #src
        end
        return stream
    else
        assert(false, ":newstream() takes a source string or a stream, not a "..
                      type(src_or_stream))
    end
end

----------------------------------------------------------------------
-- If there's no ... args, return the token a (whose truth value is
-- true) if it's a `Keyword{ }, or nil. If there are ... args, they
-- have to be strings. if the token a is a keyword, and it's content
-- is one of the ... args, then returns it (it's truth value is
-- true). If no a keyword or not in ..., return nil.
----------------------------------------------------------------------
function lexer :is_keyword (a, ...)
    if not a or a.tag ~= "Keyword" then return false end
    local words = {...}
    if #words == 0 then return a[1] end
    for _, w in ipairs (words) do
        if w == a[1] then return w end
    end
    return false
end

----------------------------------------------------------------------
-- Cause an error if the next token isn't a keyword whose content
-- is listed among ... args (which have to be strings).
----------------------------------------------------------------------
function lexer :check (...)
    local words = {...}
    local a = self :next() -- consumes the token even on failure
    local function err ()
        error ("Got " .. tostring (a) ..
               ", expected one of these keywords : '" ..
               table.concat (words,"', '") .. "'") end
    if not a or a.tag ~= "Keyword" then err () end
    if #words == 0 then return a[1] end
    for _, w in ipairs (words) do
        if w == a[1] then return w end
    end
    err ()
end

----------------------------------------------------------------------
-- Clone this lexer: deep-copies the keyword (alpha) and symbol (sym)
-- tables so the clone's vocabulary can be extended independently.
----------------------------------------------------------------------
function lexer :clone()
    local alpha_clone, sym_clone = { }, { }
    for word in pairs(self.alpha) do alpha_clone[word]=true end
    for letter, list in pairs(self.sym) do sym_clone[letter] = { unpack(list) } end
    local clone = { alpha=alpha_clone, sym=sym_clone }
    setmetatable(clone, self)
    clone.__index = clone
    return clone
end

----------------------------------------------------------------------
-- Cancel everything left in a lexer, all subsequent attempts at
-- `:peek()` or `:next()` will return `Eof`.
----------------------------------------------------------------------
function lexer :kill()
    self.i = #self.src+1
    self.peeked = { }
    self.attached_comments = { }
    -- FIXME(review): 'lineinfo_last' is not read anywhere in this file;
    -- other methods use 'lineinfo_last_extracted'/'lineinfo_last_consumed'.
    -- Suspected stale field name — confirm against callers.
    self.lineinfo_last = self.posfact :get_position (#self.src+1)
end

return M
diff --git a/metalua/loader.lua b/metalua/loader.lua
new file mode 100644
index 0000000..5a79a4c
--- /dev/null
+++ b/metalua/loader.lua
@@ -0,0 +1,128 @@
--------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
--     Fabien Fleutot - API and implementation
--
--------------------------------------------------------------------------------

local M = require "package" -- extend Lua's basic "package" module

M.metalua_extension_prefix = 'metalua.extension.'

-- Initialize package.mpath from package.path: same search paths, but
-- with ".lua" suffixes rewritten to ".mlua". Overridable via LUA_MPATH.
M.mpath = M.mpath or os.getenv 'LUA_MPATH' or
    (M.path..";") :gsub("%.(lua[:;])", ".m%1") :sub(1, -2)

-- Optional bytecode-cache path template ('?' replaced by module name).
M.mcache = M.mcache or os.getenv 'LUA_MCACHE'

----------------------------------------------------------------------
-- resc(k) returns "%"..k if it's a special regular expression char,
-- or just k if it's normal.
----------------------------------------------------------------------
local regexp_magic = { }
for k in ("^$()%.[]*+-?") :gmatch "." do regexp_magic[k]="%"..k end

local function resc(k) return regexp_magic[k] or k end

----------------------------------------------------------------------
-- Take a Lua module name, return the open file and its name,
-- or false and an error message.
----------------------------------------------------------------------
-- Resolve `name` against the ';'-separated template list `path_string`
-- (package.path-style, '?' standing for the module name, separators
-- taken from package.config).
-- @return open file handle and its filename on success;
-- @return false and a multi-line error message listing every tried
--         filename on failure.
----------------------------------------------------------------------
function M.findfile(name, path_string)
    -- package.config's first 5 lines: dir sep, path sep, template mark...
    local config_regexp = ("([^\n])\n"):rep(5):sub(1, -2)
    local dir_sep, path_sep, path_mark, execdir, igmark =
        M.config :match (config_regexp)
    name = name:gsub ('%.', dir_sep)
    local errors = { }
    local path_pattern = string.format('[^%s]+', resc(path_sep))
    for path in path_string:gmatch (path_pattern) do
        local filename = path:gsub (resc (path_mark), name)
        local file = io.open (filename, 'r')
        if file then return file, filename end
        table.insert(errors, string.format("\tno lua file %q", filename))
    end
    return false, '\n'..table.concat(errors, "\n")..'\n'
end

----------------------------------------------------------------------
-- Before compiling a metalua source module, try to find and load
-- a more recent bytecode dump. Requires lfs.
-- The dump is (re)generated whenever the source is at least as recent
-- as the cached file (delta <= 0).
-- (A stray debug `print "NEED TO RECOMPILE"` has been removed from
-- the recompilation branch.)
----------------------------------------------------------------------
local function metalua_cache_loader(name, src_filename, src)
    local mlc = require 'metalua.compiler'.new()
    local lfs = require 'lfs'
    local dir_sep = M.config:sub(1,1)
    local dst_filename = M.mcache :gsub ('%?', (name:gsub('%.', dir_sep)))
    local src_a = lfs.attributes(src_filename)
    local src_date = src_a and src_a.modification or 0
    local dst_a = lfs.attributes(dst_filename)
    local dst_date = dst_a and dst_a.modification or 0
    local delta = dst_date - src_date
    local bytecode, file, msg
    if delta <= 0 then
        -- Cache stale or missing: compile and (re)write the dump,
        -- creating intermediate directories as needed.
        bytecode = mlc :src_to_bytecode (src, name)
        for x in dst_filename :gmatch('()'..dir_sep) do
            lfs.mkdir(dst_filename:sub(1,x))
        end
        file, msg = io.open(dst_filename, 'wb')
        if not file then error(msg) end
        file :write (bytecode)
        file :close()
    else
        -- Cache is fresh: load the precompiled dump.
        file, msg = io.open(dst_filename, 'rb')
        if not file then error(msg) end
        bytecode = file :read '*a'
        file
        :close()
    end
    return mlc :bytecode_to_function (bytecode)
end

----------------------------------------------------------------------
-- Load a metalua source file. package.loaders-compatible: returns a
-- chunk on success, or an error string when the file isn't found.
-- Uses the bytecode cache only when M.mcache is set AND lfs loads.
----------------------------------------------------------------------
function M.metalua_loader (name)
    local file, filename_or_msg = M.findfile (name, M.mpath)
    if not file then return filename_or_msg end
    local luastring = file:read '*a'
    file:close()
    if M.mcache and pcall(require, 'lfs') then
        return metalua_cache_loader(name, filename_or_msg, luastring)
    else return require 'metalua.compiler'.new() :src_to_function (luastring, name) end
end


----------------------------------------------------------------------
-- Placed after lua/luac loader, so precompiled files have
-- higher precedence.
----------------------------------------------------------------------
table.insert(M.loaders, M.metalua_loader)

----------------------------------------------------------------------
-- Load an extension. Deliberately global; memoized per-mlp in
-- mlp.extensions so an extension is applied at most once.
----------------------------------------------------------------------
function extension (name, mlp)
    local complete_name = M.metalua_extension_prefix..name
    local extend_func = require (complete_name)
    if not mlp.extensions[complete_name] then
        local ast = extend_func(mlp)
        mlp.extensions[complete_name] = extend_func
        return ast
    end
end

return M
diff --git a/metalua/pprint.lua b/metalua/pprint.lua
new file mode 100644
index 0000000..73a842b
--- /dev/null
+++ b/metalua/pprint.lua
@@ -0,0 +1,295 @@
-------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
--     Fabien Fleutot - API and implementation
--
----------------------------------------------------------------------

----------------------------------------------------------------------
----------------------------------------------------------------------
--
-- Lua objects pretty-printer
--
----------------------------------------------------------------------
----------------------------------------------------------------------

local M = { }

M.DEFAULT_CFG = {
    hide_hash   = false; -- Print the non-array part of tables?
    metalua_tag = true;  -- Use Metalua's backtick syntax sugar?
    fix_indent  = nil;   -- If a number, number of indentation spaces;
                         -- If false, indent to the previous brace.
    line_max    = nil;   -- If a number, tries to avoid making lines with
                         -- more than this number of chars.
    initial_indent = 0;  -- If a number, starts at this level of indentation
    keywords    = { };   -- Set of keywords which must not use Lua's field
                         -- shortcuts {["foo"]=...} -> {foo=...}
}

-- Can `x` be printed as a bare field name (foo=...) rather than ["foo"]=...?
local function valid_id(cfg, x)
    if type(x) ~= "string" then return false end
    if not x:match "^[a-zA-Z_][a-zA-Z0-9_]*$" then return false end
    if cfg.keywords and cfg.keywords[x] then return false end
    return true
end

-- weak-keyed so cached objects can still be collected
local __tostring_cache = setmetatable({ }, {__mode='k'})

-- Retrieve the string produced by `__tostring` metamethod if present,
-- return `false` otherwise. Cached in `__tostring_cache`.
local function __tostring(x)
    local the_string = __tostring_cache[x]
    if the_string~=nil then return the_string end
    local mt = getmetatable(x)
    if mt then
        local __tostring = mt.__tostring
        if __tostring then
            the_string = __tostring(x)
            __tostring_cache[x] = the_string
            return the_string
        end
    end
    if x~=nil then __tostring_cache[x] = false end -- nil is an illegal key
    return false
end

local xlen -- mutually recursive with `xlen_type`

local xlen_cache = setmetatable({ }, {__mode='k'})

-- Helpers for the `xlen` function: printed width per value type.
local xlen_type = {
    ["nil"] = function ( ) return 3 end;
    number  = function (x) return #tostring(x) end;
    boolean = function (x) return x and 4 or 5 end;
    string  = function (x) return #string.format("%q",x) end;
}

function xlen_type.table (adt, cfg, nested)
    local custom_string = __tostring(adt)
    if custom_string then return #custom_string end

    -- Circular referenced objects are printed with the plain
    -- `tostring` function in nested positions.
    if nested [adt] then return #tostring(adt) end
    nested [adt] = true

    local has_tag  = cfg.metalua_tag and valid_id(cfg, adt.tag)
    local alen     = #adt
    local has_arr  = alen>0
    local has_hash = false
    local x = 0

    if not cfg.hide_hash then
        -- first pass: count hash-part
        for k, v in pairs(adt) do
            if k=="tag" and has_tag then
                -- this is the tag -> do nothing!
            elseif type(k)=="number" and k<=alen and math.fmod(k,1)==0 and k>0 then
                -- array-part pair -> do nothing!
            else
                has_hash = true
                if valid_id(cfg, k) then x=x+#k
                else x = x + xlen (k, cfg, nested) + 2 end -- count surrounding brackets
                x = x + xlen (v, cfg, nested) + 5 -- count " = " and ", "
            end
        end
    end

    -- BUGFIX: `cfg` was previously omitted here (xlen(adt[i], nested)),
    -- so `nested` was taken as cfg and every array element measured 0,
    -- skewing the line-wrapping decisions below.
    for i = 1, alen do x = x + xlen (adt[i], cfg, nested) + 2 end -- count ", "

    nested[adt] = false -- No more nested calls

    if not (has_tag or has_arr or has_hash) then return 3 end
    if has_tag then x=x+#adt.tag+1 end
    if not (has_arr or has_hash) then return x end
    if not has_hash and alen==1 and type(adt[1])~="table" then
        return x-2 -- subtract extraneous ", "
    end
    return x+2 -- count "{ " and " }", subtract extraneous ", "
end


-- Compute the number of chars it would require to display the table
-- on a single line. Helps to decide whether some carriage returns are
-- required. Since the size of each sub-table is required many times,
-- it's cached in [xlen_cache].
xlen = function (x, cfg, nested)
    -- no need to compute length for 1-line prints
    if not cfg.line_max then return 0 end
    nested = nested or { }
    if x==nil then return #"nil" end
    local len = xlen_cache[x]
    if len then return len end
    local f = xlen_type[type(x)]
    if not f then return #tostring(x) end
    len = f (x, cfg, nested)
    xlen_cache[x] = len
    return len
end

-- Emit a newline + re-indent iff appending `len` chars would overflow
-- cfg.line_max and doing so actually frees horizontal space.
local function consider_newline(p, len)
    if not p.cfg.line_max then return end
    if p.current_offset + len <= p.cfg.line_max then return end
    if p.indent < p.current_offset then
        p:acc "\n"; p:acc ((" "):rep(p.indent))
        p.current_offset = p.indent
    end
end

local acc_value -- forward declaration, mutually recursive with acc_type.table

local acc_type = {
    ["nil"] = function(p) p:acc("nil") end;
    number  = function(p, adt) p:acc (tostring (adt)) end;
    string  = function(p, adt) p:acc ((string.format ("%q", adt):gsub("\\\n", "\\n"))) end;
    boolean = function(p, adt) p:acc (adt and "true" or "false") end }

-- Indentation:
-- * if `cfg.fix_indent` is set to a number:
--   * add this number of space for each level of depth
--   * return to the line as soon as it flushes things further left
-- * if not, tabulate to one space after the opening brace.
-- * as a result, it never saves right-space to return before first element

function acc_type.table(p, adt)
    if p.nested[adt] then p:acc(tostring(adt)); return end
    p.nested[adt] = true

    local has_tag  = p.cfg.metalua_tag and valid_id(p.cfg, adt.tag)
    local alen     = #adt
    local has_arr  = alen>0
    local has_hash = false

    local previous_indent = p.indent

    if has_tag then p:acc("`"); p:acc(adt.tag) end

    local function indent(p)
        if not p.cfg.fix_indent then p.indent = p.current_offset
        else p.indent = p.indent + p.cfg.fix_indent end
    end

    -- First pass: handle hash-part
    if not p.cfg.hide_hash then
        for k, v in pairs(adt) do

            if has_tag and k=='tag' then -- pass the 'tag' field
            elseif type(k)=="number" and k<=alen and k>0 and math.fmod(k,1)==0 then
                -- pass array-part keys (consecutive ints less than `#adt`)
            else -- hash-part keys
                if has_hash then p:acc ", " else -- 1st hash-part pair ever found
                    p:acc "{ "; indent(p)
                end

                -- Determine whether a newline is required
                local is_id, expected_len = valid_id(p.cfg, k)
                if is_id then expected_len = #k+xlen(v, p.cfg, p.nested)+#" = , "
                else expected_len = xlen(k, p.cfg, p.nested)+xlen(v, p.cfg, p.nested)+#"[] = , " end
                consider_newline(p, expected_len)

                -- Print the key
                if is_id then p:acc(k); p:acc " = " else
                    p:acc "["; acc_value (p, k); p:acc "] = "
                end

                acc_value (p, v) -- Print the value
                has_hash = true
            end
        end
    end

    -- Now we know whether there's a hash-part, an array-part, and a tag.
    -- Tag and hash-part are already printed if they're present.
    if not has_tag and not has_hash and not has_arr then p:acc "{ }";
    elseif has_tag and not has_hash and not has_arr then -- nothing, tag already in acc
    else
        assert (has_hash or has_arr) -- special case { } already handled
        local no_brace = false
        if has_hash and has_arr then p:acc ", "
        elseif has_tag and not has_hash and alen==1 and type(adt[1])~="table" then
            -- No brace required; don't print "{", remember not to print "}"
            p:acc (" "); acc_value (p, adt[1])
            no_brace = true
        elseif not has_hash then
            -- Braces required, but not opened by hash-part handler yet
            p:acc "{ "; indent(p)
        end

        -- 2nd pass: array-part
        if not no_brace and has_arr then
            local expected_len = xlen(adt[1], p.cfg, p.nested)
            consider_newline(p, expected_len)
            acc_value(p, adt[1])
            for i=2, alen do
                p:acc ", ";
                consider_newline(p, xlen(adt[i], p.cfg, p.nested))
                acc_value (p, adt[i])
            end
        end
        if not no_brace then p:acc " }" end
    end
    p.nested[adt] = false -- No more nested calls
    p.indent = previous_indent
end


-- Dispatch on type; a custom __tostring always wins.
function acc_value(p, v)
    local custom_string = __tostring(v)
    if custom_string then p:acc(custom_string) else
        local f = acc_type[type(v)]
        if f then f(p, v) else p:acc(tostring(v)) end
    end
end


-- FIXME: new_indent seems to be always nil?!
-- FIXME: accumulator function should be configurable,
--        so that print() doesn't need to bufferize the whole string
--        before starting to print.
function M.tostring(t, cfg)

    cfg = cfg or M.DEFAULT_CFG or { }

    local p = {
        cfg = cfg;
        indent = 0;
        current_offset = cfg.initial_indent or 0;
        buffer = { };
        nested = { };
        acc = function(self, str)
            table.insert(self.buffer, str)
            self.current_offset = self.current_offset + #str
        end;
    }
    acc_value(p, t)
    return table.concat(p.buffer)
end

function M.print(...)
return print(M.tostring(...)) end

-- string.format variant: table args are pretty-printed, nil args
-- become the string 'nil'; everything else is passed through.
function M.sprintf(fmt, ...)
    local args={...}
    for i, v in pairs(args) do
        local t=type(v)
        if t=='table' then args[i]=M.tostring(v)
        elseif t=='nil' then args[i]='nil' end
    end
    return string.format(fmt, unpack(args))
end

function M.printf(...) print(M.sprintf(...)) end

return M
\ No newline at end of file
diff --git a/metalua/repl.mlua b/metalua/repl.mlua
new file mode 100644
index 0000000..4a39adf
--- /dev/null
+++ b/metalua/repl.mlua
@@ -0,0 +1,108 @@
-------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
--     Fabien Fleutot - API and implementation
--
-------------------------------------------------------------------------------

-- Keep these global:
PRINT_AST  = true
LINE_WIDTH = 60
PROMPT     = "M> "
PROMPT2    = ">> "

local pp=require 'metalua.pprint'
local M = { }

-- deliberately global: the REPL user can reach the compiler as `mlc`
mlc = require 'metalua.compiler'.new()

local readline

do -- set readline() to a line reader: editline, readline, or a default
    local status, _ = pcall(require, 'editline')
    if status then
        local rl_handle = editline.init 'metalua'
        readline = |p| rl_handle:read(p)
    else
        local status, rl = pcall(require, 'readline')
        if status then
            rl.set_options{histfile='~/.metalua_history', keeplines=100, completion=false }
            readline = rl.readline
        else -- neither editline nor readline available
            function readline (p)
                io.write (p)
                io.flush ()
                return io.read '*l'
            end
        end
    end
end

-- True when the lexer consumed all input, i.e. the parse error might be
-- cured by reading more lines from the user.
local function reached_eof(lx, msg)
    return lx:peek().tag=='Eof' or msg:find "token `Eof"
end


-- Main REPL loop: accumulate lines until they compile (or fail with a
-- non-EOF error), run the chunk, and pretty-print its results.
function M.run()
    pp.printf ("Metalua, interactive REPLoop.\n"..
               "(c) 2006-2013 ")
    local lines = { }
    while true do
        local src, lx, ast, f, results, success
        repeat
            local line = readline(next(lines) and PROMPT2 or PROMPT)
            if not line then print(); os.exit(0) end -- line==nil iff eof on stdin
            if not next(lines) then
                -- a leading '=' is shorthand for 'return ' (as in the stock Lua REPL)
                line = line:gsub('^%s*=', 'return ')
            end
            table.insert(lines, line)
            src = table.concat (lines, "\n")
        until #line>0
        lx = mlc :src_to_lexstream(src)
        success, ast = pcall(mlc.lexstream_to_ast, mlc, lx)
        if success then
            success, f = pcall(mlc.ast_to_function, mlc, ast, '=stdin')
            if success then
                results = { xpcall(f, debug.traceback) }
                success = table.remove (results, 1)
                if success then
                    -- Success!
                    for _, x in ipairs(results) do
                        pp.print(x, {line_max=LINE_WIDTH, metalua_tag=true})
                    end
                    lines = { }
                else
                    print "Evaluation error:"
                    print (results[1])
                    lines = { }
                end
            else
                print "Can't compile into bytecode:"
                print (f)
                lines = { }
            end
        else
            -- If lx has been read entirely, try to read
            -- another line before failing.
            if not reached_eof(lx, ast) then
                print "Can't compile source into AST:"
                print (ast)
                lines = { }
            end
        end
    end
end

return M
\ No newline at end of file
diff --git a/metalua/treequery.mlua b/metalua/treequery.mlua
new file mode 100755
index 0000000..f5b09d2
--- /dev/null
+++ b/metalua/treequery.mlua
@@ -0,0 +1,467 @@
-------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
--     Fabien Fleutot - API and implementation
--
-------------------------------------------------------------------------------

local walk = require 'metalua.treequery.walk'

local M = { }
-- support for old-style modules
treequery = M

-- -----------------------------------------------------------------------------
-- -----------------------------------------------------------------------------
--
-- multimap helper mmap: associate a key to a set of values
--
-- -----------------------------------------------------------------------------
-- -----------------------------------------------------------------------------

-- Add `x` to the set associated with `node`; no-op (returns false) on nil node.
local function mmap_add (mmap, node, x)
    if node==nil then return false end
    local set = mmap[node]
    if set then set[x] = true
    else mmap[node] = {[x]=true} end
end

-- currently unused, I throw the whole set away
local function mmap_remove (mmap, node, x)
    local set = mmap[node]
    if not set then return false
    elseif not set[x] then return false
    elseif next(set) then set[x]=nil
    else mmap[node] = nil end
    return true
end

-- -----------------------------------------------------------------------------
-- -----------------------------------------------------------------------------
--
-- TreeQuery object.
--
-- -----------------------------------------------------------------------------
-- -----------------------------------------------------------------------------

local ACTIVE_SCOPE = setmetatable({ }, {__mode="k"})

-- treequery metatable
local Q = { }; Q.__index = Q

--- treequery constructor
-- the resulting object will allow to filter and operate on the AST
-- @param root the AST to visit
-- @return a treequery visitor instance
function M.treequery(root)
    return setmetatable({
        root        = root,
        unsatisfied = 0,   -- number of positional predicates currently unmet
        predicates  = { },
        until_up    = { }, -- node -> set of predicates expiring when we leave it
        from_up     = { }, -- node -> set of predicates activating when we leave it
        up_f        = false,
        down_f      = false,
        filters     = { },
    }, Q)
end

-- helper to share the implementations of positional filters
local function add_pos_filter(self, position, inverted, inclusive, f, ...)
    if type(f)=='string' then f = M.has_tag(f, ...) end
    if not inverted then self.unsatisfied += 1 end
    local x = {
        pred      = f,
        position  = position,
        satisfied = false,
        inverted  = inverted or false,
        inclusive = inclusive or false }
    table.insert(self.predicates, x)
    return self
end

-- Install a handler for AST nodes the walker cannot classify.
function Q :if_unknown(f)
    self.unknown_handler = f or (||nil)
    return self
end

-- TODO: offer an API for inclusive pos_filters

--- select nodes which are after one which satisfies predicate f
Q.after = |self, f, ...| add_pos_filter(self, 'after', false, false, f, ...)
--- select nodes which are not after one which satisfies predicate f
Q.not_after = |self, f, ...| add_pos_filter(self, 'after', true, false, f, ...)
--- select nodes which are under one which satisfies predicate f
Q.under = |self, f, ...| add_pos_filter(self, 'under', false, false, f, ...)
--- select nodes which are not under one which satisfies predicate f
Q.not_under = |self, f, ...| add_pos_filter(self, 'under', true, false, f, ...)

--- select nodes which satisfy predicate f
function Q :filter(f, ...)
    if type(f)=='string' then f = M.has_tag(f, ...) end
    table.insert(self.filters, f);
    return self
end

--- select nodes which do not satisfy predicate f
function Q :filter_not(f, ...)
    if type(f)=='string' then f = M.has_tag(f, ...) end
    table.insert(self.filters, |...| not f(...))
    return self
end

-- private helper: apply filters and execute up/down callbacks when applicable
function Q :execute()
    local cfg = { }
    -- TODO: optimize away not_under & not_after by pruning the tree
    function cfg.down(...)
        --printf ("[down]\t%s\t%s", self.unsatisfied, table.tostring((...)))
        ACTIVE_SCOPE[...] = cfg.scope
        local satisfied = self.unsatisfied==0
        for _, x in ipairs(self.predicates) do
            if not x.satisfied and x.pred(...) then
                x.satisfied = true
                local node, parent = ...
                local inc = x.inverted and 1 or -1
                if x.position=='under' then
                    -- satisfied from after we get down this node...
                    self.unsatisfied += inc
                    -- ...until before we get up this node
                    mmap_add(self.until_up, node, x)
                elseif x.position=='after' then
                    -- satisfied from after we get up this node...
                    mmap_add(self.from_up, node, x)
                    -- ...until before we get up this node's parent
                    mmap_add(self.until_up, parent, x)
                elseif x.position=='under_or_after' then
                    -- satisfied from after we get down this node...
                    -- FIXME(review): 'self.satisfied' is never initialized
                    -- anywhere (M.treequery only sets 'unsatisfied'); the
                    -- parallel 'under' branch updates 'self.unsatisfied'.
                    -- Suspected typo — confirm before relying on this branch
                    -- (no public API currently reaches 'under_or_after').
                    self.satisfied += inc
                    -- ...until before we get up this node's parent...
                    mmap_add(self.until_up, parent, x)
                else
                    error "position not understood"
                end -- position
                if x.inclusive then satisfied = self.unsatisfied==0 end
            end -- predicate passed
        end -- for predicates

        if satisfied then
            for _, f in ipairs(self.filters) do
                if not f(...) then satisfied=false; break end
            end
            if satisfied and self.down_f then self.down_f(...) end
        end
    end

    function cfg.up(...)
        --printf ("[up]\t%s", table.tostring((...)))

        -- Remove predicates which are due before we go up this node
        local preds = self.until_up[...]
        if preds then
            for x, _ in pairs(preds) do
                local inc = x.inverted and -1 or 1
                self.unsatisfied += inc
                x.satisfied = false
            end
            self.until_up[...] = nil
        end

        -- Execute the up callback
        -- TODO: cache the filter passing result from the down callback
        -- TODO: skip if there's no callback
        local satisfied = self.unsatisfied==0
        if satisfied then
            for _, f in ipairs(self.filters) do
                -- FIXME(review): cfg.down calls filters as f(...), but here
                -- 'self' is prepended as first argument — the two call
                -- conventions disagree; confirm which one filters expect.
                if not f(self, ...) then satisfied=false; break end
            end
            if satisfied and self.up_f then self.up_f(...) end
        end

        -- Set predicate which are due after we go up this node
        local preds = self.from_up[...]
        if preds then
            for p, _ in pairs(preds) do
                local inc = p.inverted and 1 or -1
                self.unsatisfied += inc
            end
            self.from_up[...] = nil
        end
        ACTIVE_SCOPE[...] = nil
    end

    function cfg.binder(id_node, ...)
        --printf(" >>> Binder called on %s, %s", table.tostring(id_node),
        --       table.tostring{...}:sub(2,-2))
        cfg.down(id_node, ...)
        cfg.up(id_node, ...)
        --printf("down/up on binder done")
    end

    cfg.unknown = self.unknown_handler

    --function cfg.occurrence (binder, occ)
    --    if binder then OCC2BIND[occ] = binder[1] end
    --printf(" >>> %s is an occurrence of %s", occ[1], table.tostring(binder and binder[2]))
    --end

    --function cfg.binder(...) cfg.down(...); cfg.up(...) end
    return walk.guess(cfg, self.root)
end

--- Execute a function on each selected node
-- @down: function executed when we go down a node, i.e. before its children
--        have been examined.
-- @up: function executed when we go up a node, i.e. after its children
--      have been examined.
function Q :foreach(down, up)
    if not up and not down then
        error "iterator missing"
    end
    self.up_f = up
    self.down_f = down
    return self :execute()
end

--- Return the list of nodes selected by a given treequery.
+function Q :list() + local acc = { } + self :foreach(|x| table.insert(acc, x)) + return acc +end + +--- Return the first matching element +-- TODO: dirty hack, to implement properly with a 'break' return. +-- Also, it won't behave correctly if a predicate causes an error, +-- or if coroutines are involved. +function Q :first() + local result = { } + local function f(...) result = {...}; error() end + pcall(|| self :foreach(f)) + return unpack(result) +end + +--- Pretty printer for queries +function Q :__tostring() return "" end + +-- ----------------------------------------------------------------------------- +-- ----------------------------------------------------------------------------- +-- +-- Predicates. +-- +-- ----------------------------------------------------------------------------- +-- ----------------------------------------------------------------------------- + +--- Return a predicate which is true if the tested node's tag is among the +-- one listed as arguments +-- @param ... a sequence of tag names +function M.has_tag(...) + local args = {...} + if #args==1 then + local tag = ... + return (|node| node.tag==tag) + --return function(self, node) printf("node %s has_tag %s?", table.tostring(node), tag); return node.tag==tag end + else + local tags = { } + for _, tag in ipairs(args) do tags[tag]=true end + return function(node) + local node_tag = node.tag + return node_tag and tags[node_tag] + end + end +end + +--- Predicate to test whether a node represents an expression. +M.is_expr = M.has_tag('Nil', 'Dots', 'True', 'False', 'Number','String', + 'Function', 'Table', 'Op', 'Paren', 'Call', 'Invoke', + 'Id', 'Index') + +-- helper for is_stat +local STAT_TAGS = { Do=1, Set=1, While=1, Repeat=1, If=1, Fornum=1, + Forin=1, Local=1, Localrec=1, Return=1, Break=1 } + +--- Predicate to test whether a node represents a statement. +-- It is context-aware, i.e. it recognizes `Call and `Invoke nodes +-- used in a statement context as such. 
+function M.is_stat(node, parent) + local tag = node.tag + if not tag then return false + elseif STAT_TAGS[tag] then return true + elseif tag=='Call' or tag=='Invoke' then return parent.tag==nil + else return false end +end + +--- Predicate to test whether a node represents a statements block. +function M.is_block(node) return node.tag==nil end + +-- ----------------------------------------------------------------------------- +-- ----------------------------------------------------------------------------- +-- +-- Variables and scopes. +-- +-- ----------------------------------------------------------------------------- +-- ----------------------------------------------------------------------------- + +local BINDER_PARENT_TAG = { + Local=true, Localrec=true, Forin=true, Function=true } + +--- Test whether a node is a binder. This is local predicate, although it +-- might need to inspect the parent node. +function M.is_binder(node, parent) + --printf('is_binder(%s, %s)', table.tostring(node), table.tostring(parent)) + if node.tag ~= 'Id' or not parent then return false end + if parent.tag=='Fornum' then return parent[1]==node end + if not BINDER_PARENT_TAG[parent.tag] then return false end + for _, binder in ipairs(parent[1]) do + if binder==node then return true end + end + return false +end + +--- Retrieve the binder associated to an occurrence within root. +-- @param occurrence an Id node representing an occurrence in `root`. +-- @param root the tree in which `node` and its binder occur. +-- @return the binder node, and its ancestors up to root if found. +-- @return nil if node is global (or not an occurrence) in `root`. +function M.binder(occurrence, root) + local cfg, id_name, result = { }, occurrence[1], { } + function cfg.occurrence(id) + if id == occurrence then result = cfg.scope :get(id_name) end + -- TODO: break the walker + end + walk.guess(cfg, root) + return unpack(result) +end + +--- Predicate to filter occurrences of a given binder. 
+-- Warning: it relies on internal scope book-keeping, +-- and for this reason, it only works as query method argument. +-- It won't work outside of a query. +-- @param binder the binder whose occurrences must be kept by predicate +-- @return a predicate + +-- function M.is_occurrence_of(binder) +-- return function(node, ...) +-- if node.tag ~= 'Id' then return nil end +-- if M.is_binder(node, ...) then return nil end +-- local scope = ACTIVE_SCOPE[node] +-- if not scope then return nil end +-- local result = scope :get (node[1]) or { } +-- if result[1] ~= binder then return nil end +-- return unpack(result) +-- end +-- end + +function M.is_occurrence_of(binder) + return function(node, ...) + local b = M.get_binder(node) + return b and b==binder + end +end + +function M.get_binder(occurrence, ...) + if occurrence.tag ~= 'Id' then return nil end + if M.is_binder(occurrence, ...) then return nil end + local scope = ACTIVE_SCOPE[occurrence] + local binder_hierarchy = scope :get(occurrence[1]) + return unpack (binder_hierarchy or { }) +end + +--- Transform a predicate on a node into a predicate on this node's +-- parent. For instance if p tests whether a node has property P, +-- then parent(p) tests whether this node's parent has property P. +-- The ancestor level is precised with n, with 1 being the node itself, +-- 2 its parent, 3 its grand-parent etc. +-- @param[optional] n the parent to examine, default=2 +-- @param pred the predicate to transform +-- @return a predicate +function M.parent(n, pred, ...) + if type(n)~='number' then n, pred = 2, n end + if type(pred)=='string' then pred = M.has_tag(pred, ...) end + return function(self, ...) + return select(n, ...) and pred(self, select(n, ...)) + end +end + +--- Transform a predicate on a node into a predicate on this node's +-- n-th child. +-- @param n the child's index number +-- @param pred the predicate to transform +-- @return a predicate +function M.child(n, pred) + return function(node, ...) 
+ local child = node[n] + return child and pred(child, node, ...) + end +end + +--- Predicate to test the position of a node in its parent. +-- The predicate succeeds if the node is the n-th child of its parent, +-- and a <= n <= b. +-- nth(a) is equivalent to nth(a, a). +-- Negative indices are admitted, and count from the last child, +-- as done for instance by string.sub(). +-- +-- TODO: This is wrong, this tests the table relationship rather than the +-- AST node relationship. +-- Must build a getindex helper, based on pattern matching, then build +-- the predicate around it. +-- +-- @param a lower bound +-- @param a upper bound +-- @return a predicate +function M.is_nth(a, b) + b = b or a + return function(self, node, parent) + if not parent then return false end + local nchildren = #parent + local a = a<=0 and nchildren+a+1 or a + if a>nchildren then return false end + local b = b<=0 and nchildren+b+1 or b>nchildren and nchildren or b + for i=a,b do if parent[i]==node then return true end end + return false + end +end + + +-- ----------------------------------------------------------------------------- +-- ----------------------------------------------------------------------------- +-- +-- Comments parsing. +-- +-- ----------------------------------------------------------------------------- +-- ----------------------------------------------------------------------------- + +local comment_extractor = |which_side| function (node) + local x = node.lineinfo + x = x and x[which_side] + x = x and x.comments + if not x then return nil end + local lines = { } + for _, record in ipairs(x) do + table.insert(lines, record[1]) + end + return table.concat(lines, '\n') +end + +M.comment_prefix = comment_extractor 'first' +M.comment_suffix = comment_extractor 'last' + + +--- Shortcut for the query constructor +function M :__call(...) return self.treequery(...) 
end +setmetatable(M, M) + +return M diff --git a/metalua/treequery/walk.mlua b/metalua/treequery/walk.mlua new file mode 100755 index 0000000..67dacfd --- /dev/null +++ b/metalua/treequery/walk.mlua @@ -0,0 +1,257 @@ +------------------------------------------------------------------------------- +-- Copyright (c) 2006-2013 Fabien Fleutot and others. +-- +-- All rights reserved. +-- +-- This program and the accompanying materials are made available +-- under the terms of the Eclipse Public License v1.0 which +-- accompanies this distribution, and is available at +-- http://www.eclipse.org/legal/epl-v10.html +-- +-- This program and the accompanying materials are also made available +-- under the terms of the MIT public license which accompanies this +-- distribution, and is available at http://www.lua.org/license.html +-- +-- Contributors: +-- Fabien Fleutot - API and implementation +-- +------------------------------------------------------------------------------- + +-- Low level AST traversal library. +-- This library is a helper for the higher-level treequery library. +-- It walks through every node of an AST, depth-first, and executes +-- some callbacks contained in its cfg config table: +-- +-- * cfg.down(...) is called when it walks down a node, and receive as +-- parameters the node just entered, followed by its parent, grand-parent +-- etc. until the root node. +-- +-- * cfg.up(...) is called when it walks back up a node, and receive as +-- parameters the node just entered, followed by its parent, grand-parent +-- etc. until the root node. +-- +-- * cfg.occurrence(binder, id_node, ...) is called when it visits an `Id{ } +-- node which isn't a local variable creator. binder is a reference to its +-- binder with its context. The binder is the `Id{ } node which created +-- this local variable. By "binder and its context", we mean a list starting +-- with the `Id{ }, and followed by every ancestor of the binder node, up until +-- the common root node. 
+-- binder is nil if the variable is global. +-- id_node is followed by its ancestor, up until the root node. +-- +-- cfg.scope is maintained during the traversal, associating a +-- variable name to the binder which creates it in the context of the +-- node currently visited. +-- +-- walk.traverse.xxx functions are in charge of the recursive descent into +-- children nodes. They're private helpers. +-- +-- corresponding walk.xxx functions also take care of calling cfg callbacks. + +-{ extension ("match", ...) } + +local pp = require 'metalua.pprint' + +local M = { traverse = { }; tags = { }; debug = false } + +local function table_transpose(t) + local tt = { }; for a, b in pairs(t) do tt[b]=a end; return tt +end + +-------------------------------------------------------------------------------- +-- Standard tags: can be used to guess the type of an AST, or to check +-- that the type of an AST is respected. +-------------------------------------------------------------------------------- +M.tags.stat = table_transpose{ + 'Do', 'Set', 'While', 'Repeat', 'Local', 'Localrec', 'Return', + 'Fornum', 'Forin', 'If', 'Break', 'Goto', 'Label', + 'Call', 'Invoke' } +M.tags.expr = table_transpose{ + 'Paren', 'Call', 'Invoke', 'Index', 'Op', 'Function', 'Stat', + 'Table', 'Nil', 'Dots', 'True', 'False', 'Number', 'String', 'Id' } + +-------------------------------------------------------------------------------- +-- These [M.traverse.xxx()] functions are in charge of actually going through +-- ASTs. At each node, they make sure to call the appropriate walker. +-------------------------------------------------------------------------------- +function M.traverse.stat (cfg, x, ...) 
+ if M.debug then pp.printf("traverse stat %s", x) end + local ancestors = {...} + local B = |y| M.block (cfg, y, x, unpack(ancestors)) -- Block + local S = |y| M.stat (cfg, y, x, unpack(ancestors)) -- Statement + local E = |y| M.expr (cfg, y, x, unpack(ancestors)) -- Expression + local EL = |y| M.expr_list (cfg, y, x, unpack(ancestors)) -- Expression List + local IL = |y| M.binder_list (cfg, y, x, unpack(ancestors)) -- Id binders List + local OS = || cfg.scope :save() -- Open scope + local CS = || cfg.scope :restore() -- Close scope + + match x with + | {...} if x.tag == nil -> for _, y in ipairs(x) do M.stat(cfg, y, ...) end + -- no tag --> node not inserted in the history ancestors + | `Do{...} -> OS(x); for _, y in ipairs(x) do S(y) end; CS(x) + | `Set{ lhs, rhs } -> EL(lhs); EL(rhs) + | `While{ cond, body } -> E(cond); OS(); B(body); CS() + | `Repeat{ body, cond } -> OS(body); B(body); E(cond); CS(body) + | `Local{ lhs } -> IL(lhs) + | `Local{ lhs, rhs } -> EL(rhs); IL(lhs) + | `Localrec{ lhs, rhs } -> IL(lhs); EL(rhs) + | `Fornum{ i, a, b, body } -> E(a); E(b); OS(); IL{i}; B(body); CS() + | `Fornum{ i, a, b, c, body } -> E(a); E(b); E(c); OS(); IL{i}; B(body); CS() + | `Forin{ i, rhs, body } -> EL(rhs); OS(); IL(i); B(body); CS() + | `If{...} -> + for i=1, #x-1, 2 do + E(x[i]); OS(); B(x[i+1]); CS() + end + if #x%2 == 1 then + OS(); B(x[#x]); CS() + end + | `Call{...}|`Invoke{...}|`Return{...} -> EL(x) + | `Break | `Goto{ _ } | `Label{ _ } -> -- nothing + | { tag=tag, ...} if M.tags.stat[tag]-> + M.malformed (cfg, x, unpack (ancestors)) + | _ -> + M.unknown (cfg, x, unpack (ancestors)) + end +end + +function M.traverse.expr (cfg, x, ...) 
+ if M.debug then pp.printf("traverse expr %s", x) end + local ancestors = {...} + local B = |y| M.block (cfg, y, x, unpack(ancestors)) -- Block + local S = |y| M.stat (cfg, y, x, unpack(ancestors)) -- Statement + local E = |y| M.expr (cfg, y, x, unpack(ancestors)) -- Expression + local EL = |y| M.expr_list (cfg, y, x, unpack(ancestors)) -- Expression List + local IL = |y| M.binder_list (cfg, y, x, unpack(ancestors)) -- Id binders list + local OS = || cfg.scope :save() -- Open scope + local CS = || cfg.scope :restore() -- Close scope + + match x with + | `Paren{ e } -> E(e) + | `Call{...} | `Invoke{...} -> EL(x) + | `Index{ a, b } -> E(a); E(b) + | `Op{ opid, ... } -> E(x[2]); if #x==3 then E(x[3]) end + | `Function{ params, body } -> OS(body); IL(params); B(body); CS(body) + | `Stat{ b, e } -> OS(b); B(b); E(e); CS(b) + | `Id{ name } -> M.occurrence(cfg, x, unpack(ancestors)) + | `Table{ ... } -> + for i = 1, #x do match x[i] with + | `Pair{ k, v } -> E(k); E(v) + | v -> E(v) + end end + | `Nil|`Dots|`True|`False|`Number{_}|`String{_} -> -- terminal node + | { tag=tag, ...} if M.tags.expr[tag]-> M.malformed (cfg, x, unpack (ancestors)) + | _ -> M.unknown (cfg, x, unpack (ancestors)) + end +end + +function M.traverse.block (cfg, x, ...) + assert(type(x)=='table', "traverse.block() expects a table") + if x.tag then M.malformed(cfg, x, ...) + else for _, y in ipairs(x) do M.stat(cfg, y, x, ...) end + end +end + +function M.traverse.expr_list (cfg, x, ...) + assert(type(x)=='table', "traverse.expr_list() expects a table") + -- x doesn't appear in the ancestors + for _, y in ipairs(x) do M.expr(cfg, y, ...) end +end + +function M.malformed(cfg, x, ...) + local f = cfg.malformed or cfg.error + if f then f(x, ...) else + error ("Malformed node of tag "..(x.tag or '(nil)')) + end +end + +function M.unknown(cfg, x, ...) + local f = cfg.unknown or cfg.error + if f then f(x, ...) 
else + error ("Unknown node tag "..(x.tag or '(nil)')) + end +end + +function M.occurrence(cfg, x, ...) + if cfg.occurrence then cfg.occurrence(cfg.scope :get(x[1]), x, ...) end +end + +-- TODO: Is it useful to call each error handling function? +function M.binder_list (cfg, id_list, ...) + local f = cfg.binder + local ferror = cfg.error or cfg.malformed or cfg.unknown + for i, id_node in ipairs(id_list) do + if id_node.tag == 'Id' then + cfg.scope :set (id_node[1], { id_node, ... }) + if f then f(id_node, ...) end + elseif i==#id_list and id_node.tag=='Dots' then + -- Do nothing, those are valid `Dots + elseif ferror then + -- Traverse error handling function + ferror(id_node, ...) + else + error("Invalid binders list") + end + end +end + +---------------------------------------------------------------------- +-- Generic walker generator. +-- * if `cfg' has an entry matching the tree name, use this entry +-- * if not, try to use the entry whose name matched the ast kind +-- * if an entry is a table, look for 'up' and 'down' entries +-- * if it is a function, consider it as a `down' traverser. +---------------------------------------------------------------------- +local walker_builder = function(traverse) + assert(traverse) + return function (cfg, ...) + if not cfg.scope then cfg.scope = M.newscope() end + local down, up = cfg.down, cfg.up + local broken = down and down(...) + if broken ~= 'break' then M.traverse[traverse] (cfg, ...) end + if up then up(...) 
end + end +end + +---------------------------------------------------------------------- +-- Declare [M.stat], [M.expr], [M.block] and [M.expr_list] +---------------------------------------------------------------------- +for _, w in ipairs{ "stat", "expr", "block" } do --, "malformed", "unknown" } do + M[w] = walker_builder (w, M.traverse[w]) +end + +-- Don't call up/down callbacks on expr lists +M.expr_list = M.traverse.expr_list + + +---------------------------------------------------------------------- +-- Try to guess the type of the AST then choose the right walkker. +---------------------------------------------------------------------- +function M.guess (cfg, x, ...) + assert(type(x)=='table', "arg #2 in a walker must be an AST") + if M.tags.expr[x.tag] then return M.expr(cfg, x, ...) end + if M.tags.stat[x.tag] then return M.stat(cfg, x, ...) end + if not x.tag then return M.block(cfg, x, ...) end + error ("Can't guess the AST type from tag "..(x.tag or '')) +end + +local S = { }; S.__index = S + +function M.newscope() + local instance = { current = { } } + instance.stack = { instance.current } + setmetatable (instance, S) + return instance +end + +function S :save(...) + local current_copy = { } + for a, b in pairs(self.current) do current_copy[a]=b end + table.insert (self.stack, current_copy) + if ... then return self :add(...) 
end +end + +function S :restore() self.current = table.remove (self.stack) end +function S :get (var_name) return self.current[var_name] end +function S :set (key, val) self.current[key] = val end + +return M diff --git a/src/bin/lua.exe b/src/bin/lua.exe deleted file mode 100644 index dc1c2c35cad8b237d692009417d3df049c69d659..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 45056 zcmeHw3w%^XmUnfhlXTtz5(qETVxv(W20}291QH+!%A=u^4#*>Pl1@4yNymO5C`zCc zAhsDF<2dR(gu&fWcbs)TM`hH}Xc*BQboj;>voa&Ii!)b2#Ze(BQu{x3@9o^~bRb0B z{l5LBpl(&2I(6#QsZ*z3w{sWYa2JbWj2YlLXKXJ*az)~K?uUfzZErMH*_)bNUQ?yRY7aRmRe1Xu4>2Rq@;MWLOS)#r?-^; zWNVh{J*&=Sl_Bj}^=Xz3;ScXQEJDw!udut%k*7pSl!EJ~N4GcslZq(z42k3*OWpH1l^IgT{RhnyO+Cy*dKIFg@8 zh%_DwTnR+((P@lXD2B%q6#_J%hY56wv?a4aQl|xk#wU9n8@&LPj=+N$GASVf)tMMu zH`!fn_u3hIH~~=5?2&0gCi&!9O3aw@F_mF;Cb@s_Zb!RderwgVd`yCF)-0$#Kwy@J@<~teKY)%WjGGV`8(m@KT&OA``cSc#P z(q>|AzYe0yKYMF>VBT*TfS)ERKx6*d&Bq|qw7?>Bs=rfbS!R{yiRL8c@V1k;O{2V1 zhd?u}V<8Ka{+2DbO20yBz{gnX&swFkL~5QnOD8Hx1HC^XFSTOrnyv%o7MnCm7EW8I zZ;iFgCjAr8>zVV3ROn5hTRMScxlQ^3Q5;KJ;><=)y`H~WE^FD_0>54#kN~ntN0IdJ zD{A|A%_{FQU_i203Q!_C=tb%;>28sxzMcpKbemH~00+@~527nAyEBT4m~`hioKt6K z%GrVa6C!hXwgn{);zALtadRewNI)2dM* z31(jeG;*uBzz?=6S+Ee&DyyiVr&Z61w5C<}$Oz%B`pp@+RSV_p&{kQ?%AG4n*JSno zI$r5VoAeh%ZITg%>RF&$?jHZAI>`bETJj}fPSW^d+}_{DN1YzpxPj zDe?%)E+p#%L(^%p*m(L~!2}n>kiTo`m}fRSsa{yMhmq~yXKL%LSnC_UCUA$~C+C2) z43xI+BUZbBZs@S+Ivy5Bm_bdN@Ul0lWwRkFzGI#t%BSCHZ2Q4-4slRf-j< zMA(%Na6TI7#=!9NsFXGd!w~{VwUG4xgR&Nx$2d2GhNZ)+JI&MY6#`7le zOoRAf2vUi5Vt@pyTmEYC& zp3iQRDp9R9XhDC86h@sDfoeO&bXyLg+obydrcsEoDMM}2Zh**yzL(Ov19ed@>}rfq zGz{7eG*lN09I`eQBz&7>27o#?v5otpUnDCkSO-G7GDSn|n4-nZ1+;Q`I@18DlL&#P zGcvo6h#E7%M%%++!zKj;z|hv2^1bf^;QXR~T#XRgyBB)i` zgS!s&d*GX6@yPzYNEzF>H=oA-StY4V$yVuY89UtapZSt3zg4g4~rt^Qb;U-(k)AyK1!+e{6U_{WeNzZ(9_LneP6^KOc{wRtc zOWz+GgvDWQ_ZMtu-i*Ma?FJ;;Uh_Sg)48TAkhq-+d809u*L3+eZ#OXS-;s|T{?Vu+ zb{zgy_{ZVjfPWVLOL!J@0Zw-TxLQ;_5>YWRh9E3j7!`SaST5E<`JV<(+-40Jo`b%Z 
z+`d59a@v6@=5&KE&)=zU$z#6Qz`W-SShQKB4^DA@Y*foeJ?pUO0WTGB=K*MxbowMs z5EzxdB;e+Mv-$CgwIrU3wW15K$kRCg@l+VI2J;wOdA+Vo`Vgub=nh6{ih{+bRY@cuR)njm^lv@F&^$sKhp#L@_^DP6J4Cc$m zIGsOZvw4iK2+vgCOrB1&Ff3@R9NZxUK{k50^yKGg$d}SDPQxUvU8As*(+SWf@X`a) z^c6+@#}jSk(h@Wwa00Wcl@|2@*48QSzGP8E*n>I0fQYsGm3LK1DSMR)d7A@!&;=Wn z0eOvq;EfiiX$Qph?Qo{c!>4wL*OBGkJwrEoa-L zYeWRy=xZfmrL^75(d?YgwZy`Q2a$V#k6%DtI)0vb7Kvwpc;<=cH1W(9&xzuhC7x#S zG>PXJ@yrm06EUM(IgR9xJ=*Q=L1f(YS|!$FN;Yi#Y1?j?90>XUsV{zukBASgKP31)e?;S;zhzz}+pt210y&req`H$MOXT zwq#;^E@e8M5R-E?9+DsRtz3zoAF;pW48{rHi}wrIhkwSSJ+}Zns?!Ck;@{pJ@b|Rj>YcTwt2Y#UBc)%ElYhQfCvP^2Bwyo?6IHW+y5rLia zo`N3>{9g-Rs0qFpp8f<+Quic*&?O+g*Uf*1ZIxR}p;2Ql8nF zL7>wKI>x-C5$XYc%fGi3(Buuj&v|Xe&Td4H4e{^w3ea8xNrgzdAuSU3X<({1o!=5|v;OS040!d!P+l@TEL8O{RYIidw*q+TuN!RHa6RRM9VHzkh zU!t?)aw_j7iqb5I*Fkm$$ocT6Ft}xb<6DnsL1udv6r-}FdGt&# zqUYEGdgkWQvv3-oHFu=16G2j`2*zfMAbO$*#%GCOk{Q8|?~Go7AW*{gEE1t%j|E{z ziE+^6w#VMKX) z8XB|bE&#A|N+X4;N5eA!bT&gBdHNVc{e^_Ud`94$hysq2X!CRv@}Lc71g~|$&>8}4 zYhIP7@rds#|HjlPpE*#JR%nX;{zz|n;iVKL7mlMKwvZJ)pz}pbkAhfE=gS7>TzDB( zM)g9leEcJb)TcQgpSkfgf1zX zEn!25t~e}p>#%=BWuJqsm0jFxHb~1M$Ww<#cb+=5azU4^eD6lE{USZ`_XkdRQUWm; zq4)awS?0`IP~ZX2iAQrIK*UQsM$IL!zc=p2^1GZy|gt?jGz-auhneb zn314MxfjDhU9^a9SWpo4YRWUkX;xdxGdITM{ITt1dB;3s#)eD8nc9NH{L}8ZzzSp3 zpYp$UC;DI3`8%`n-*Nv9XGLqZYg3v}q1yFlov(sXXE6pv^=^oPS^US1u$e-qrj&H& z{0>q*QN4$<+8q)kMlmOib*TWd5sNKvS)jx?NsNCvhomOYjQN2g^Gj0Zse}F_Iz`-p z#V4xUam)r8gQvg;SfUF1q|5nZAuMEP%DpcKUMBmb;A3IXs{|ytkNSB0W;|u?c`lKp z!@Tweb7}|tgd;r-IfI6Pf9~rJl$=orx0iGW+t#J6LuP9kX+WyhJ2z*dHX3cxf1oZp z4Yr}dQ6vu?%Qya#E)Nq^-qa9Zn{WW63Eb>vwcB(q?Px_JyUOd@P~ubF7hHdFe=M!in}=;{DV+?YUIJ+KN@LVLYxzF+P5Uc$SK% zMLZXYXOVaoh-V(2gKkg)gAy2&z@P*MB`_#~K?(f7EP>7WjMczf;EUidhaU?6#Z1Ny z!S92A0{$NO+u^|KZ5UuH=@i?_)+j#@Y(PM@Z?q_e+hgM{51H> z;WOZk@MmzIL}yBe;19q*3;ziG{qQ^CH^4jLZ-8F_p9gshmAa(wXydtHOQvcX|ea7=c$V`jX`G|6G| zcKNncR*WeRXb!siCgYZFg@nT|r=hQD>5! 
zgSf-bHt@7pdFwVf#BVoD_8J1q^kve5IZo}b0;mYb1|>WWoooHZGz-$eT|Tn$K-XH#}K6exP9C?KvFPaOTL%bP%N59>Wn^a|Q5Td$5fUhuR zjrX8EsGeb0$tK}Zm{sL!1XI=M)h5T4Y*ATp#lj^EOOeeAH3{X1IaO|lU8o42GliTS zO)g)p)70o_bh$TCDU1a<7}yhf-aJh{(o_i)U#`YNLpmQM$U!BfbR5RcSa@=3{sB!s z!KdSxl$@H6mw*HvNJWknM;ZRIKYO7%9DYmG>)$C}3sq&yJ3Afhat*LV~R5LbFq^lZS z9tT??;zVbIy}{?e&y7^NH45QLGPbL!%8r3GTckh>c7?HGk*aI*INZpq6Dg09XB8=; z+@MgJCu4h5j?*Z;sG`ATudejf)HvMu0hNG=!Haj!1issT6G+xEk4Sl4K2#?Epvt8{ zRQefm=_bcbWV#*fYLPDwSudG>mLy|TIUQB&fdfnCm1%5hcEHYwI4V=*v{a<21yv+0 zpBp%fWf&P2mQHb*|C%~bpDgmKnnfmCBvLS{4G!3*tK>W?#wtYI4U4e>6dfXkTAeP} zdbUiYs7{(iY9SNUL58-PN**`V2uoJO?$K3HolP!gh(>QXnl>;tT3=D;v3tFn*r&kr zc|GjLXviA>@f$T(QBmirtf=udK|`$3p1Rs5J1G8#p{Aw*wK4XAp$3FJY_ZigcX?j6 zsBo;V!rthqt#E9tL;wB^G=S1*uWO=Nl}*!CRLME5IvJ&oRjjCL_RgSL=1c$W}MFAVvo}hO&?ibvoulCp3exVc@E| z-skaF)Ku4@-ERP=xdNpd>6emNL&@nHIpqd#O;yOKXm*L2nMpFu3VU;Ng;(^v6?MD4 z6=I@;%qv7!u(@nD@rl-Y9YsYei&w0;fqfoT;cEhK>)8XKf=+QYiAHYILyrN?imFf` zP95ll)O!m_smjsptq^m#!#z#rxrvP+No;VfcU0VD_j+8=murPIylz)RO>+e%xhwSx zX;ndWII2rFR)IpTgW0024Gu>$yIh+)*VWjJwM`I-t&J*Gn_+RgsvI8Bo1?qd;hpPq zyD<%`z^16OCYdv;0eoWlp>!p?F3Rfg_L)6HztA&}RtXTE#qIEbDBGi7=5x3=SsZS$ zmT9VT%ys#w4eWiYyR@!qy#O#aU&BkO==_DSQgby>g+qY;Z!g zim~fUmM;9^ODZdQIZ^2K>rE(=HNX58?xjkx_Bbb}HY zl)#__1|={kfk6ojN?=d|gAy2&z@P*MB`_#~K?(eClz;*6X^LyFmSlvu6lqCCxD#Op z!d(avXk3i=xx!w+^?)8kNaZDj2Ex*@mS_YS2=RM8mW>cW(9t=ANzYmqgu$sG<*n-j zzA*wEZ)CH(s8ER0(OckbS0C^v`ha)#0YBIW{74`06MeuL2`j|Oe5L}Py&Uf)jPJ=j zbQ99l8H#J!jx=?O;ruSppe87MjNv@I2Vi*| zKSI{rsR{5p6YGvNuhKn}j^=bSlnMidqnUP|?pl$0bOebwyQv(vuodIv(k ztgI}ixVU%;-uJ)_2<{qZ&mIHcjP$jXo{zng_{DqW3Jtp+WEf-RzAGkW`tj7BF zE45}{S^HD{@B>Y9Vsbd-1Mf<8$x5hH?;o{ z$b#&~oO6}9Jw5%hZPC#kTMPzi3w}4zAHOv!>I1ar_P~e{!p8PRAN4oFm-Ur@F8JKJ zbBCd9bzeBQ$H!;1#Kb&wvrhMQe|V0B(}J?pFKrna>3!7$9*X+P8z{UjA>oECI^8Fc zcp4B)uRq?Jm}u>bAJR>Xy%BxoYvE{bBK)mdm>Pc!V_;is?41MR@%*}V7PNP8S}ue+ z@Ppsj6g^0`HY}@G5_t8I?2O<&Cgx|~5%0lqgRya6q&Coc1nrpEi{X9(zC*bOc~@oQ zkBh_V@xmR}1jUg`34K3*|M%iU_{6~3te0x$-#CEmD$MXlLPv<|FFoDAk37oslW`5t~$uhoP|IzkQqlSg^L;4L~ 
z@kVE;gU(N-byX-78!XsJRen2$4B;pK`qw@^)epJx_g{EH-0^n*;~$*-zm%8rkKcQb zKbV^<0FZhRVy~Tf3RmIPK^9}#(KmSwaAN{>8Cgum>{6NOE zPV6s``& zT9=38iFs9{_bUItuB+>*Lz7SawD-mv`9D7RASC~v7A+EOQu!wtztq|qg7b{a#ZR3$ z5dtH6SaY<7^96nv!k4IUv}LEZJp`vYBYaKJU;atQRGp)+WAx|{{$IWOZr%-Ef8+58 zox+-kKT%m3k$>3J_fr~T-_v#~Bi)neF_(1u`Iwyqxtp)?9KK%Emp5h-L zJ}kz;7ax5Tk$f^T1S3Y{D;%UIOWtQk@SafG57RZhp940mPBLHIqFU(Wa4emg((#1owC@iWcM9P5T)zP4`6 zZ_j9X>rZ^3{A=sIXmOqH>2RJ43JTI`pNxIVIOxx!@O;c8PleatAN>h0b1~AeSN{^u zA9${&Ig9oM&?UTK9O`Ia6khjs*y+D^b@88DED`%T96wT;Y-!hy9sGme|6bV6!_Ph| z)(&J-BjFRQ1^d%b?!-CRTJ@X);qdT$+H3bkZ=i6Rf62x~awHd;$0E@NFID0Eoi8oU z3Bv4WB-;Ijk=*;@U2V_1|N5^H`G?+K5zeo6y+isFzV@W`1DzA}m%sDjkE@X(LrW&lOS|a&mGsHxM#*Bs+92 zKKsfeJJTQctLo|?|B==W3l=O$#@cyGcv~X$#J0`r@mSxX7Ji;JX0?C$1PHCM`ugm^K5^4jf6+`oowW}9PNEX zS)J}I_Qwl*b0X|MemQ*Mj?R`M@pU0EvhQRg!yU;z$UAJdKPG1Lg>xvEKp$L#J~D-u z=h!pT+0upZ+ZQg`NU}3DR!QH3Is@L@l9GaZcC1qqaUNhr-)+F!ZawzGz0MuN`J_1+ zZFrEzb9f%LfzFn|*SFH8i0y*TXyxutNLblx+~J&s)MSJd<|#$T028J z(@X9VWU@6j7Qe(hw}TFqL6_>!g+=ExbjCsF1pRIY)&w-KMw**Q-}^&{`rurAsd+SC zsPXfKbWT8fZIUVN`DovS{&|{ome#l9 zlTUW0U;IZ!{pCXHQO=AU7gA;*xMU;IXTdc@Uv0tI`{_XFQyI+Pb7)+s`=5c*p>ojA zSkQ982j_m$x4!r$xES|INfFl&{pAI1m;n2PzeXV5d6?LryB(D_op+$@Vp{XUM!?zfR%2VV^@)?a9=4f$KteJbMX;b+qx zsV^?bzLSjvC9yOH-^-6CN0jzipy&$C>7`UTgW0r zG(z}3VG_JMQx02nB0P(;YJ#7E9#MFLv6S}_9$_q&(w+F5g4(bJix5gv12iGXfDmIZ zD508GLy)IG1i<fGay!08yDo_Z%V3}&3Lyw>q!6Q# z{?sPPMTGAoCeoTv;BkJ2%5xxK^}~h3-^M|+vfzL?n{NWrVdOME&8OJ!~7=Ik+ zUmSYDG4?q0gl|%9o?g!}cX8-9hYoR!36Av?hwgK{&%j}CIL0i8?u)w@obz$$ILCQ0 zo%7K8jAQ=exVPfaf4W1W^)$zMINyfzMLJ`|`bgYU(H$D@7CFWQ#~y{&uHwFp&Z%gv zj=MXKdjO9038%X+*aZ&zEY5Lpw>*$V-*3cNE;{Jl}@#wCP&WA{)=zfgul<3}u?xECBt%d}z&1XCZXg6_WP+?npg=-!U*^=NNTcQ$mdMCa;s|3vp;bf-aQMs(Ll zcLQ{1L-&Dn=SO=Uy8oj)KXOo)LI1y328z89UET9Yj^?S!u|4T?#D*m$^4Qo!J|s4Y zCnYAG1Cz31(oz#ObcZGJb5@fGLlNfm+VUZJvB@DxO(Hf`lTf44=fF=?ku)Kb@2ELX zn{3KR7(RT|sLY9J!Tce?m>hvLWH)VeLR`>|7&To1hczrkY3HOd#vrnYq>su|2r30b zg3CjZMJhf^MoH@thfv-qRswMsB5Yy z09Il|&yEHtXNOHN5x43Vgo}49jN4>KWMvCMGYKDa8wLj=X`}=__#2VR2Y$`g)$=D> 
z1-`h6)&p0xb~2F&#h(uRQMQJg+HMv2aWkm`)tMsW6ZDcA8DDO(%nr|pjKA(?bmE%Z zC^Jr6?I*@234VkQ_vWYA?{U=nodQ3Bcu9=bM^QMOKtcTZ43<@nYT}I!rqh7#s2p77ONa~q3A{3WWRf0M_<{=eN z=%hPX|G+6sYD?W1#9s?=CShSs#AAi}PfypPiug@I{K~kv^lJ!TxLB;WBUR8C#4l5{ zVW=2iF!*qJ@!=Jes`#zBaq(9Zj$#C5Ynl~~KdMt#DC2pl52( z5S6Ed>w3#{-rCBt+_<tsa?=ZmOcCR#UY3Sq;g$N@L^YBV|u zykDl!7*V1ia@of58R3mc9}USA2g!_8@iBwSZfd5c8S$B-o6JEL<@Z$xbEtxkVCE&G z6Vl^Gjm#7(20)Lph(W1fCPw!Z361s0L1>+lNyaF$n;PUt7?mERq{Ln`?NHKRm7(-2 zdT>Q7DTc=eryb6tVulSP$xV@EkbY?xIwfNb7KN z8n+pZH(;Op7|wK#Zqc6e;|`-GD(bH-2E)&RW5eDPJNAp?=sXE~=^L@%{t$JZ_k2>| z;9dfJ*5jvNI6)Y=!~T3c?gZZGcfH~K>GiK-KW`cc2RP5qLz_PyaNPCA`RD;*u;+=R|f$o~-MH{_nItrcfE6sPl!r`E6MbS6Y~?^(D|yrWI=uTP!g z-^$^<9sOcDH#|e{#1FZ^;26_m>P=uoz5Vf(o{d4IT0^(zD2lnc6Ivm&x3db zLmj=|*-~W!o%^F&$9d~V`&C@Eeo9lHsq%lht*ux6sN)s2K5a<%in#v>-tP{yetNf) z&I0c=nfT+YSNEzPW8v!uMvm-duOa6`wLKILw0_dJN0u)avU%{DYxtMPjs@kU(ntT$ z(P@Z-ja{zhpRfMk-f|0nw6ruxm(Kabe_&xcciBFYPX6D+ftymTMflr z1No^qbe8-u#xZye!qmJ-A&t{N?Apb@`5MbSI11nR=}*OP1;X(o-5=vzTaU3Z;P-w0 z^yHJG-tfn}IMdbEPwND=&EX-&&47OA^uV-f;njwuweQwJp7({ofAxjFTS2-< zc0ZE)T#{rR)ag$4_YM%Y$fdTezu_+2)zBHg*3mrh5bkq`2ECsU-qv%ap{IfV)~({v z??s-!`DXq)<{(-Z+<|);%ER4HE7e7IANN6vF}Bx14|=_e7tRmn%8lrYGb;TH4)Kl* zXo%nEly7hN04UD&6-J71C7+$Y7z13*UN4Yn(<@ysxnb)_*#pMz$TGyZ*5!aKD z^F!MDBE`jTAdSWquuq-mk_XmKU*it%*=@Kd2amx!YP54>Up!NpbFGQO`5|2N;X>?n zT;P2x_ChU)H`4rZXJ%$JmHZ)f^tNIJZSpQGMt6B zWa{|ovvek^dOy`o_vt41BKY8ax^k@!kk;|dVRz<`A;@u&(16 zAHn-^=%Rc#j&(BKjl;fhoEeDy7Umv~aTL5G7x&}18>jgN`$us%j(!5J+ZCVSf_vEzZr}>`N1hhwc68j|D8_^n><^-A_X#Grk6WY(w z{*Lx6G-uFyjn;6qW}@{Jt)po@PwQx!M`@0w^*r5ci#-nJJKA5-9#38Op`-YqAC$p& zqys%Sim(~eF1SEH9qx5j`N$I&NtS6i~B%U&57nK~mxlk}hi=t}2ULTd& zQxvh0n!+-YQN6(+yb)og3B}lRlAqP9G$F(XxlGazFRO){KDM@YX>FdqxUw>Tb){ZC z`8VtI<7-Q;R%>4E^)qTK7H3t?E~s=&*2hnhc}&$Ke=L!#T?D^4w_=ojTs4vk0WhcL zUr+h^vdYTklrJt8`BjL8<_}5ESwQ(laqvTwL-SY9(-*7*R4;A>hxFtxE79whT&th8 z%D!eH<;ypiJ^2LFI+33Nh(ce<8+Xk(IX0XcI2!Skq68&RuOGXjrnaVh6s2(uh3ktj z;om@_tX+w0y&&nAd|?$g>>IsC4Ua48O4nV7Di-j zg+N$4Nkr1+?$3~|%QRe_sS7Q#Z6qpQN;YVtC< 
ze%zn-;f4}IT%T8HoL@0y_!zb9|L{767TG5F;C)KubqX+goMsOYJmVu+4@TserkuQo z#{3$-j_r$%DBI6-^hh+Q9y*5(udlDPwgvs+JDiS|hw!}qr0Hx^<)J_Lk#H`gJe{?L zpR@J1{4T7~R7Y`@7q$HP4kXCN1WVm4WHtdDfD-l zqmeQMLy_6X;}gsWI?(enH@>IY#A-ne-_^udA{Qc6!(1r$ZvrQqO~bEMX24HlmEw0R z*(?!d=K`k@{~W;g;;WgA-6a0u>y*sL?D$Hh3$$wi&&F?DRQzV-G_g(K-Y&`#dyB&G zo7iQ*SNWNT8a%8@@Y#%->Ofaq3n=AGf|DlT;5(hbHHlVHE!C)_L0}V<+T}&Q8-F|7 zjZltL{S(oPJ@wfHjz?2ZF4{5$o_fQJU+?HpvjZ(7>Li6`w1=cv%bdu^7X^F4Ozcr` zFa=Va0Vp|5Vi z9ckVi!L41$i*!eI1+zCcHmKjeE*v*ya`rfr_+%%(a$PvCtaSdQym9o6?xt!xeQvXG z+$M)--0WFN@db8|$I)2Xu*n1hO`gJW_$2g955Df&X!lHNtgCXnJgyq=Bzzcjrrpyx zdBc=(^!@3&8i&VglfSer*QO=4uo@q)tn+S4!gsYzJxqyjU4tAw`u`063wz>EM?A+z0 zN&{3><@@A?5l*a diff --git a/src/bin/luac.exe b/src/bin/luac.exe deleted file mode 100644 index 6f87524e2118c67296b0c108cc3410dbc738d6a1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 208896 zcmeFaeSA|@wl{v#oV14$ngaxg5Fu*RYCBG~Rf{PapiRqLNgA4RNNClP zwsYFYo^GNzSD!nMI&XJouH(%7=H;2ofX-+`p#|SS92JEMzRrmOsiIH>$@5+NBx$Q& zfA{(Qp8KEhIXx%)?7jBdYp=cbT5GSp&$1gf3r0Z@L|mzqAZ*2*{<-cVthNaJ+w<2`wdPnoRJ8oI`t=k;m`PS{X-w}3v`@4>Hk=q@&-tMSe?swdF z$4%e8Y|^BBXBu>Gjj8N~OTKqY=1Lqh-SQ58Cyq_OWU*01{x5_ntDX5mKs*o*s^e{N3TF$rHkeocX| zDeyG~zNWy}6!@9~UsK>~3Vcn0uPN{~1-_=he;WnZr10PZ!RrgzH!l#hSKb!{rO)lv z)DQ5mLvAu?>=QwFZuFma-w{Dzf>*0VRyL^_h1m+Hr0fvAKPO1OI&a8HpjZafwH%ZU zpxom@aiAUL-|$8z@uJ>P1qvq3I!bAbs2YyZ$)l)m8dU+jLO0=F7rK!uIzN-+W#rIt ze)Z!l&OVmY-8U$5cSAV8qg)7^{*8e6>;Fzx^W3#gIS{nmn}Z)oV{DPY#K(b>rTas? 
zd^&tTS~t9n?EBIK$A4|IUTyM$tzF2Q`0O*(Dn6e01oy@UwsXy@>-$HU4Nrcy8r*f-5ShrT zB~~-c=F6-4>jOdUrE|a!!$?$12HnyGpv8jrq9Adi@~PZDvB?nBo<_GTJ0zuAlAH2E z@1Vktc3L&L8d@GLR3%!rwP4>*FaNoYdO^JYgt%lZa1^n;%^ry&pOrB-2!VcD}9DRQL(!_1SM|34`sXTTI!VBcCcFW zpx8MZ-WfK~ux3^Na}1rxmO7;X^@g{}5b$ZQ>El2dF@z_gfAY2x6EyUf_BDXC$`Of3 zDhHYjey!0As+BOe81SoMr%kPN+O>!Cd8@noBKcd7P#3*^I+dy~J=%RD7_DF_)MaJj zZ%X%pr?N~6j(T9b^1k7M?OwljO#uOr03fI{m)!OYAs1k$wad<;yGMRb+y!!W_l2jF z?gpNgpT>Pb?Q(P?^_I?amacn%_Rnc-0Mu;F^&a)NGT=y_4{E3U+7%NiyE$sE5*z{>?miL84@pSVmf|g&Mnj=45bB%i7J`25F{>a1G$#g}vcm=>@&+pWQ2T;f9q$Np;sz87YQMA- zwqj$$nlvKmQ9`(&^CRY9)A@j9)!wGTwnHpUb)T^OkAH%Me#=vE5YE%*Ec zDV8lA;taQta&{k_?iNBTsFi0xUKC6GJKl#@Qp)@3lm{tgHKpuNrzn(iBc%-JDaml? zCi?l;^v~~xLiF>o^v^(OE&Y5b{j)mMOh0?mKg&a5`gwQy=Ovac#{=4TKjP?s8Wa7R ztT!oH5D2wWRxi&gDT4-Ao6~Cf!7L2PRiMmZrBgf~xFrsuG-UZjXk9t3Fs=YD1y?n$ z@8ZhAWySSFT!JCTD4KF7e96DOFG>5aB}sY*#Se?d`4C)^a!PFRrkt_8pnRxczXXpXSegjW6_cOUp%f&cCZ}s~^kI<{@_$#R& zZH@rEg6k<i+0EgLPxkY?7WPu- zTbaDulR2zEoxefPuk1`IpIuJ*|jR%1uIZA`ph=>wS$5;0~W$5s;M>E9F6CBU=se}dc=Og?pb)?v^`N7d2= zH85Rk{4Kp!){lBGB=oghs`?qCGK6I3d(^myIaBh_*^>95B+6itA|A5=^)kKuz?oDE z{Z3eE-_|pQ-k}!O>p#-;nnER5zh35tROWiU%w>AE%lJX4(ww|QbkUeW zX3M6RQ8^?KrU*y>=D~B^!aKO8&vXly<7&ip2$u;RbsYCga2F&AQYXhCN=6a?BvHaY z6Fu<uKq#5N%}O^T=?T$d!;|OY zSJ?EV12-0^=-&6VlP5e~ObMRi`arC>8}t|I{a)sAd+U6`P&r;{XAWZ^d1{m$&I-fd zbs&EyARW;qhNt!4Pt)(-hD9Bx4NvGPPv|)o3)|M>Rm8N7lIty7`n;Aco+Uv*w`_UV z7ivbT_An}7o-z;!6aJcUct#7A)HslAL%DL+z@ddr+o%%1w=U=lgnmSje1N3vus1e9 z{dta0#%8vrFc7Twhj!5W%lUhA>51+G(Mw}dLy!v^Z6+Zwz@FD`#e}N&W*#M-Bt`Xj z+bh(4{aJYB0f0M5;NCjKg#!n7b+9zWs=%n_5?fI&8V4CEXg+1Tp^gSrAoMYn_#>4V z0E1YSK?41}4w{SMAiLY?4~E1CkgrFFtlVuE!WRcnK`J_jEwnT9AJNxnt(`J`Cj$)H zblaN^jpNnh=jz!J<((0qpRJJsfQ`|h?Z)8qYg5rmu&A=pEQDwHwSVX{hF_z}66xA{ z+T?u$-iJ~O7ZSY^(zYreFu zX{&JM0-=B2@cvXPny1fq{qsJ+ukg}&$LRJA{w_R+EwVD7$fj(gmqoiQTNdQGcD47# z-vq45Qa@CCrSHc6*!-30jQOZYm7(361VKQ04cd;uC(U!ufm#!I5>iEIUkVXpX0;M~ zu_$1Ue<~XHc796l+e?A!r7U0%vJuF!q8DASw&!~T+HY-qI5UsLHdu{=b3IbydZX%* 
z8d(3Te$w*B6BzZ0BrIU#viU1j-^%f?pM%H{*?g&JzBxW35`NIdyLVHmwkw&}jJ6qH zoa>d=&v}9tU4xrht29`RC1)rdUWv7uji=_eO6wAh$ZLF2^-BFHf!;UNNP0!oUQ#tB z_JYtP8z7&Q&FuI%2)0nVJb9|?Z7-TtEqI=xYtbxXjfiISux81Mgsgs zGPZknIFHG!&J2u%Mj%3!6aGXat)Fs14NByM3zUQ@Y%J~T$LQ&^;qMqE^|ofJ3Q}FW z%k;W*O+;oy|_;E2}BjScP`dL4t!K6axTrurZk~l zhf&Y5bEzKN*iL{(2a=GrNe9?i@v6?q*^LLD?DGpqy27ZT2MLk+xqHkG!IAr5*{ zIby{)Z*vyIsxUomy8==@GjWBT4zG_lF`(5qf!Si1{D4J5htt>qfyC)!R<4CiAT$Q4 zDZ(Ik6Ktgk4&7WU!h{^6s>uivdDjQTU|mQ+17lH%4C`uW%1Ns;27s!kbu>IOSI)~W&m^R7P1gryE9at|I&}QMCvZHt_VS=&Oc|=Av zKNtS8Kq%dzb~(-X#WdXgAb>RGhL-XQ z{=^}hH3JaAEN$~@<)83oDI*s|N(RB5rv2L+lqhn?OfV2$s9g)QRr@{YMMlQA zU&(gh0MdEqg-U>Xx&!Y=edviB@J`uL#yc>XuPxy}CAO1o;+;lwU+Ht&+b_d1*de3y z#`80;Z}R3`orBj2LIJ{@|K*d$`0ntgStz3}h=B`XF3Ws6)(GtqP(lj0?W~yf@6>&D{w$RVBp@CD#5Lsv z5e=|`uvx-DvC#>W?v(@4TXh9e%+r2I;)$BR%*Iw&MA+U`bX?mrm=M*nIVI$jKcCY! zN+sx=O41N{Xe@HhG02VMJ;!LTbLviE^>)xz`vhZ~NLv_Wsd4SZ$;zv*P< zxvCd|>BiMaTs>w;@La*gJ)f)5fRX%#8khVRYGjSY><%+$+0QXh2HY1J=uyr<^;rfA zvH@)-iBa6^_;e+Wu@#7kl_So~WC&J*R4jqGDa71U26aKCz9=Etgv1Io#O5RBAkstF zK+zT8S6jh>-Pf1`x5X$(&|QupLnmML;g3LoC`b)YCONFsvFNzMXpsiWBw` zoX2Lafyk^M%03jhwD$Sg9`AYI*z%)l`0seNt2~UlQSA(||P3%6PYo?Lm#ofP z<|gP<H^Vt(&(tGW`a4|2zw#-17zo36@gZftog>5cboLS<@= zRh=)Z_s}!hu84fKY-zMww!EmkWjy&(*Gu6wyPq|6Fr){Z@&FgdmH`mM(_MiTYN5#=qwY%?(NG!c0Iq?%<^$Iel2$X3kj4QS-(L!HTz?EA@VrB%Jb3Z0hCRL@~I zi0btstCPI2Oaf|+q)ob%N=SrIq-)h0t}M+I6|}wK?eO@d-TtQU@%3$Ug_aSXxoj0h>rnK5Z2=pcahMBJ} zsB!n0r-ap7M~j{WRhg$eW_2?UMw5FK0Q6`8aKa2V?gr4^1YtMQK)|JF(^Gg|Y`kAku^4&) z8kSeP|8m}U8O=)XHhNDu^Jy0B*(|ew%sia9j$l1aOKd-Q1TMCz-Kh=ag|#-p!U*fD zL`9z&qLmPNWPEdgH14>;Yi9!wQ4iUerD*$$Bd%R-R@Vz{=b}C*7MV;sF3Cx;vI)vr zaii%eQmp{oUb=591ZnEzhxM`B4K8g>i-YB{S1f;sTmJAORbJ=y`85xz9guog(?I0S z^J{-D;mxD8F}f`Bwbnl9DgFj!SRi74Ps<*%S82AdwW;1JMm0>J{>3n9Fm z9QqsjGoVcd$vSFG4yDh;7FI%qH}*9e`x?}R?y>U6;~>caGxE>Sw*y++dBp1Q1OVL( zO9!+QWNB1au;pc7Nnb#F55Lvwvcc*KV1WE3d}tIJ_hm*#K=%jK7PEjOJ+uPaA2}46 zEl2y8LT_&cF#`HrnRR=m$FCy@j2yrSuvwsxv$xr8+?_!Q@}7XvI4AgrMH=B=Bw^sS zV&GNbQQ1*GX5i`Uoc8~5A!OqSa5W+nEBd0N%k10GQta9huVem3AS 
zeG4*%OQUABp|Xc$PsM;|u7L}m5M~do9*h#GIeEzx^iXD%4zCKo4V4et}Ikn7ON|< z4qizlQ&%pH)!5YW_XzDLozM0VDt@$c0(6_(ejIIsV1+=?P zf$D~U_R%x~BA2GrXqb5lgBUFcqvWrj}Fl)5f}WqIrGkuE@)p>caXHx&7@{AK)6>^{mGWi}i&v*%0u z*xBR1B{@^Nd+rhmN|hX)%ZyCyL2I3{jpVap@&$Xk_p#!8)k0Ogmu-(EAxLD*qIN0= z2e3;o*fz)@VyBMs;2~~b%4%tY`aNmQDzfgl>JzIrC?iJ$8z%U*M|3xBZ24p!b>P>2 ziv*HmY*>vV$UtI#xp#pz=!*qpr?IZbV*=BOq!VdzI1mOZuZ6a~P#=#pPuDC@5s}Y$ST8Z7YYs1ue5Kh;%W6Fa7{R^Hk5tH&XPG!*bdn-}d&IaR}2~&h? z1WSDMd(XN#mMtp^QY;yN*JOMdx1%}6my33lzV6z!{#~fTc67g$xku4?mWjRHeR@A} zf92d(v!(lfbdHh|*`AnJUT0;Oxh{=NQ4*rET~r1{ER?Q40$<{HEn6-Qn~26p1xU zOZRH95Y0ihfacRl#1(R=g;rK4bDgaYI@@a}_bG21p|ic&*Z`$%O;%|G0rXf5rSGgV zV&vS~vItA*&|G#Q-<_ili? z5yM2uWc(4p(RCZH`*5Yj7hexJQp0WJ=2-pKfa-0IRs9W8d9dmXjR|$xR_$l+@tF>8 z7du-fvxwvir0UJAek-dw1D;R=%?;%CCm_qVsx{3><_S~>`MS5m)y@6~&ei!9ku;d5Fc@Qvxv__AIGw2)e1O4`|+#eAZ9G zDSRP+6tOU198sgBR2E~xW-m0bm)ERH%s~0VZ@#`z4E~egXT^{9iN-!yYf>TyNE)~E$CF^EG``A_wWIc8 zh;WCnt%M~CIc@3s8#tgNIv3GqvTtaW_NLWDdF#MM0Zf%b3dSfyH$b=E2}b0zCm1Z?FuvNjMh!@-AQ;z@Ir?CnICw4k zUpXTCEnBW%R8QUGC_U;rO&0mab69`TAi5T5P~O!hiPSA}Wqo*J{$dbeM zVo?JQRU54qthN(mvB`jr+`9POTV$^r6 z=ynGw?`Gw0DK~5hX+Rb4wp~ahqBS?yo(<>5<_}}G$*e(nCDm(F5+Jqg+SBqL^Vo|Z zv_|AKI~c$YX2D17U;sN@NA4n6gIpDMVCLMZ%4~_KV$$+UYp@!&s=o;OXA|iUY(#TF z7a)ek>&ZZ%)r#c}^Ik5y@DAiuqEi*BYLRA|lpbID9i@i41|UjDuM{GCf@Q z`joC?Q?6jxr1fH4L4}XqrKwRXy8)I9TP(8cFdMVFkHK7^pe_83`n0^G#ke_aFT0eT zQoqTXMD-4`clA{`P+kp6k%LeeUHhX``20s{^CeoM+uwd1lZD)<)=IFq3>o`}tDaVt zhxH#X`XCHO*{^dd*fPEP_SbVblPafYZ9V+2h;K4&?K|W~{G(jQX2h1mObx*58o_E7 zjQJYU$N$F%P}&ZV@1s>NZ>#}RTQ@lCV7vk<1&QSZCT(GLI_8x|k`T{e)z4A2@p_R8eG2w<1~)8s@{i z_I&C%FuWCV&PuqEzJLYTT<%si2eE=WqP2o;Y03!Lm_|cTP<=qjUQ{b}w89q#?~SGV zUG##Vt&}ZW7A|3XF~PZ>X~()ku`Ax58xtpZlSbn%P;>|sMTIQtFn-o?_hN*HS+*=$ z1Q`~)Lyn1;VD)>06KX|r4unh5Am(+*NH2R<`xVKfr~}+!#H@-%Bo9_dosm4vj20$y zY2Nl&)m9N2@$F{I7XK1pV0;-PmMt->wG_z%LmVSfUo~~rNUPS%od4knBId7=)_sBj zXVA5puvPKc#HJL4Uj-+e8Nbhv9loajtEQ6_pT*IgY2@y}^pZgilbi8*ND|ZrAq=4h z2P7P%o!hyI4LMaK69VB$5EWpX9h^{R415MFU1B~tK<;HMfo`18*sv8gv;d8$R)XGB zM12XwG{`dO 
zNQhVtXtB%m93J`RPEQ&7z+36^@)h@fZa#?({Ws5hQ&{&m;Yea!Q(P1lZ33j(tAVh#{S#ED5iUn9UB57v zO~OTu32_c(sV%%2H5A6gb7*kVV1`7*Dz+JNv3G^DWOVTM1m;GU7sbDzEQ}eobJC;s zK`2Uq3g?;B2xNF9IG+27RE-$PqY1FQATmc2U`>*SPML`cSSdpp-ZttAI}3z^Xf&@IpK-Q z;XI`e10?m*;dDOWAi`LgCtx~XXJM(hhSw7Rx&Al;d1b`j7{>sm{N#OwOU~t!c$9_m zsj+2t%K4ymfm`6*+COACXghTH#E#0l4(ka_z%sr8LELP6KT92(?-p{AnXXUpJjz{Z z`ALxlW)S9F#3rFaQe~*;hUT~iA|@Uz(kpeGY7Lt@POXd-Hp!geP;B50<-Cp#k)h$p z8*>ku-rr=&ZRgD=-Z zPb)VxS{~q`?i_7D+YY`b-AvcldNDT}x^_oS7WI>^HR(f4ewh%GF%uln#2m+mlf&H^rsxV^$a)&3@j&Uy~mCk5uu9VSu+lJ zz+M!Po}^it8u60ei2pdQH===}Pwc&J188D-YL^l}pq#3;+=J;13V`~%Dheo#ZN`n& zi?mULl!5Mj5z`Y6bd&Z6435MRlQeTHK z$B;JY>Q6j`pTI*Nf@`7=Y?IEBrpr!=_`~&VGBO50XCQp{YCAtJ-F+pF+-<;fzYprI;$txsdl?&B-CakH0G#;MCaw zfiz5VtMobOirP*%+prFRu{TiZ7Oujz64#x$9>rzDwGWq&`~T;kAt%?EEidNgn)vu2 zK>>M33)N7;>NLAPA7!N!2~l%zZpFM{WH#8_8iW^y(yRe(zCN7cf=XdQtG~h>X$B-K zGOz*VeJR#zKsv!_WH0yK|fEhQxKGioa4V@yOcML$A5`BL**zF&~5=k0KQ#Gz%Gzn@eMgF z&L$P(9l3I3bt0QqLcsmv&1_N`o^Do`NF5a=LfGjI_%)3pmQJH{fY2yDY#K*sM)Fdi zl#v>+j|V8%a*p+GAT8g%o|3wc$>-bt6@p=U`BHYsEYn-;2^zKE6; zJ8m|e!NR68l54r=QH&1~CT5m`=cv5KO}1Z0t%(Goe8O!qhV!vF6^AN@=CTbiaFEnQ z5D1bEWy4NXhGG=m*EWR-*J@6Fgk6pOK$|eyva3v^80zsMR`|LCi0yR^a%~9F zkzp$*SOKlP*}YmlMmf>CN^%WCA_FC0DOtM7oeoTG?0yoVbxlUEb_3SkX|(|Hy5Tk@ zDv6N|3<|T{^gXXui`j}5u>B_w%sK$Yv}wc113s9P#oCp4q3n=P?w{3na=#v=6`|gC z_AWE^?#s^-jG&vAn`X@kaV29#wo6`P{ z2pkFy<8dk>G zv@lwZ?P|%~G`=1Qz9bq0)6-+10LVbpX+KCi9}qgUQ+7oQ+x`K-b3M-JOsv)_6+aNL zZRHte?(Fy$1}2=~9%tKo1iy@;ie;tZ2_Ysv5za#8OSE3CI1AMkEo*y)3Y14nO8c_- z)Y_B^33erWF=ozTlY8q-Y(mFdXL@`l3S#OryQ1Z7uaCMZU>&DNu3y`$+Z-LyT%kS~ zE-O9S5zP}e+#ZYCly*MI^RiigrDugW$fKQ?gR)BlR9Gkhg;X@P#H zJHcI_Y&cPLhl910#6J@|&yoe+9%Rd{Maw_$A~0?@_8I$J0~@C+XXWT*tSZxTGQ{Wk z@9RUlx;VY<)3s}(RT;SE2qfsr?B)s@`f?6|3~-z?dYPQeEQItPn8NvD9PX33vgY zkPQLp>6QLf{VcGSEi6?1PAr_QRo531NI%wu?os zQF()_79H{X~^XhO`OS@}NmBYw|MsI+r(WX^F&Tj_cXB3nN$)pwWtb$uijg z#CBsOUpbW*F`w8jh4bL-1VjM>UI&GI{E4z4nWj3m8>fDv&T&4QjP%#%_m?|D-x7pCd z?hCoNSv8K~5h8M)uKtF#Gs!DOVaP!!>W} 
zshHf91tKhbGs4BUPexpV_6J^8?niJVrP(O$;dC0t=3S`lCS0|+9>jGAu7$X+$Cc@y z?0jq4vPO>-x3R00<3^$&_ZO4lunS(E?%iaZYzTBjMM;nMZ~8122-QKo<_dxtbF^j4 zF6BdG_p{;2z4g|Pw~Wq_{JlK^6K39XSgxAqir5h|U!X4ITdWYmc8GY9CfkR|R;KMV z@VBj}xOoaI@YPr zT5O2P!A>H-_Cro%xVkIg5GZTthkW8GB2ti$R;_$k2#Ct8x`Y+Fw8(_P>JsYRq=_vP z$QR$ta^UM^%bV3pdaF#mL925F?aJSaiM_iIb87fbu6?YB2aw%QgSw71k9%NEVM519 zzJV=w`+FPa(rN_@RvP!IK{xy#m{DjPXT0E8s>8Acbgw>HEd+z*0T7Hk2a+;d1O5S@ z0?mp5+tV;tUG84hxP~S*(g#sgR$8H*$}FW7p*Mhya%YJU`3SAXbdCka^>Ps1<*|GH z10L%_z8|y{3W|AdREnO%3h)lQ+x*%GpMkCr-B3oNQrkbtHJf3S^qr4qgyQ`NX);ew zz#o>2c4oB9dTkYaKG1wSM?PlN#VXX^ zJBqnxLcxOp9ad0#9vlwINkt~>`qZG7fXh-Zx4h8xyb2}xjzWYgruWYdYP+-V5wo@y zuD|a!vxtBJc^BD7qzR{0;17}%VGinNRA8t?Z1Gq|JUc1Ph-nr0gXA~CE8ruUG~w6^ zgAL;n{JjvmFb@+R)rI|}3$HKXHAhrToIv0L?B~A<7$9~L3|l}S1KI(jt=%a^K4ih-I&ByUY#v*aOR!s%e-wgO+>tAS-6h)%BP;&uYI1!|HxtWusFF=kqfm z68FG43wu+iz5beZ;Vl!`#>$wvv*|M*6tsX^Wz#+*zjZy0Dp$SKCJt0hGqS46qCrCy zu-KZ9x}XCsbD&&PAx!fZxQ1a?s~m{xj7;*gH~9Vq6cbmm`pTkL4fRxRGiISRI6z@; zRY};cJ%o9kRn;gXH*J7k|A@*#-^4-pR83Q>mZmXGZCc?L9>TR1*Ku4$Xk&K#GS3PG zz;+mE8xI=Rj;XrH<2HDfvVQGVBovKwM5hVitGZL|(DU$wv9)xbfa#V5YGr4f9^naf zV@$I^rAfdP55c%p4N%I~Io(O$$(w89@0%=t2zcRBrBsZ0nja9Lf#?|n-x5txg{^Rc zVTkwlBFYo)fZqA0jI8&WJ})nLd4#l9B24i{(`n`_#DbqXzc7<84WD^wPJl%0MZmFI z1CNu{Y3!MA>haMgxiS9;5MS~#LyW)yq$0|}jxdW8@w~{jXtyD9DcqSIyG2A;D0e|g zyo60Tu>j`oc_-px7~$PP?Gk;y!UWJ8pjoQp&5`^Ud%Pw-BVe&mxl_uG6e4n#^s9!* zY}W#7B$w6MRIeFr@%Pp& z_6ICmninA!M0wj_e8aM3UxVxQRqb;LFQ9J-f77z1reBJL_Uh%8`y!E*0Ja3gFL zWOktA9kAxGKjVHZ9PN)E%~jsMuJlCbyD8*UPG1Cr*GWfJSYj_D0lAAUTNW*GB`h%* z!9wr9=)3tpWqaV|DSE-u^&%BZ81zMZJVd@5*9yLoLhy0NTO}QzIuMQRhqFYERbFdA z2+yW=@-TNAT(J2S8+JINAt@8}CFm%~<9jDKaOaZ<6+6CyJ2roJ&y7soc zh56NaV!Hw6Gj>uD*j|MYddwZp$3Zz|@Gih<(bZ>BRRE4v=kS^+tkuCPw~5rD)58%g ztYhCCQ3|MXEK7jp?+K)9UE#*V$zOBm!+3ik1yO@+R*JR~;~&gW4jZwl(g_iOnP3Y5 zQ-nq_z0JWZZKD3-(Zp(W2)#+Hq6gHk^q*Bu<8**quvlf;qCB>moVnd=aYJ{}W}8~P zhy`<8SRb;)uBTGKn4D?Xb(}ii()~@m{tc1C(p7<%i8=TUb}VygmiwmS*)?FfZzAqQ zfJ6=+EL-YoTpuMqL<86Y2_6QeEmsmP-DiPf;!UIym(dw8nkDu=(s0;P84hHi_7Vtk 
zFu1@6i9ZqmP=HQ?*%U{{*wVrrokK0LKjF8sGl!Smv?)_o!GjLnNYFPSQ)ipAj5v(g zz|!??0zP7i-9UE)Q&{fv;>matj3@I$5itE>7Fw_*7QQUy8n(n10R%@vGhmxtWCmNY z4J*R8L8@YIqY$=2WJuDcSS)80rZVVFL8^}1FzV**`X?S>p5XkId*PQ95`*~DeIi;w zMU0^4?z{V^_Fki?0TG!av985)H>R8!L3yI}ES zh@xDEOpl@CcvLJ5tFx)dG~x}fb{n^0_4XqZC)iRDY?mcw!*9Zdd?O$*Nejy+nuVaj zBCHqk4cd$N0}KT|5TNV4d%atjbAwx`z*UWF4X$=v&)_!5M~4c>U#fH+EP14*tT5f@SA|7l9;eUugwiW^ zryMz-hO6sVnqL;#)MaLDheppu%tg)S_}LsV1rBO>Ar*)%F(4e-x0iP=Vtim(s5uc# zC_$}_6c{f5X?bX33ql~(O${`XV`1FjZq{;f?8=4IJ~%8a3@b5rq4L)(T_Frw-8UH+r`Q_J$SAhERImBa!R%j)>q4gaItVOoXn;Hi&R2<+A;e zN$e_`^=WIjoWu?_hMm?Gpa3l=S+VW+pS;ahpC5}!4jd?lRs}WfJsR{5iZ-jHXtRqE z>80$b0tiasyU2fYh*Hf>IRWijy@sqc1b&B>MN0*= z1rt8AptkC5>;l$yg786YIXFVuVIB1*KKGmtBs9Y};%D}lb~P4HsIV1YI!xte@RY#v z**eMf!urX&|I`%^J1Ah|g>|10bNI1t&S})eL1V`~H2b1z+P9AFuvqg7YR_1u&uEMr^3Z8aOysR61leMVnfb3? zZ_YKooXB)SSPBgj5bY5V62ym|-hr5jt6&yjBmrwl-WPH+A4pt)C z7QMmDr7Ru`8OZ-Ky12EIdMmaM47glY#R5ccG4Tq%9|NZL4s+*O(&XA`D33MTfts1~ zAR527?6q8IBGHT3hNKE`lgM&3#YJ5f#Gdpo8 z3igmce~5E5pi)?dJR~}^;f&}uD6;f7-L?ESe!nAOjIFv7kE1RLPo>xbG}M7X`NMyu z#yk6ma`i*9i2wOPSzhfSY^%agSb_2vmwwHl`ivL(u!j#6A=r)S7;KDmt_Ex6X~=qi z_BNEC=tpquS||22MQ!Tsw(vPh)Gfeg=BYsJkP(4wZms(toJ~&ji{a%Ix`pF7a_HC) z1a9HblbsZ|Oy&iw3KO$;LJ}PSNzQQ{+b|7D6N^5Lr*hcRZD;Uex9hdA*-wN0avdfn z&@mL!-4`;2MS{6(?3)xFD5-_)4p9w>tX)!DC3X!ZwaV-+ID}<7#8q07Xh}bpTzp)ybnL9i!-$ zd>Oip%BSu%tAH_rJYUBU0v7>2H{g(ejRNq(uk#M+$E<*joH#HN)UrdaaB~GB+Z0&^ zhIFv2k+)|K#2zA_BxNH;QXv-Sp&Xuf0=rh>S;EOUd>HVZu~AZQXl)NZ&TGm4Pzh5; zV)a--B~iZ&L8}OLyS^XWPO!d`$qWX@LRqMpB!XYNOZPd@1QouN1_cx-_!8JzU4tU0 zS7Gx<{urRZiuOo5gKVeP#A$^d8VwQ~Lor`>sy=)^Hc0urw_a;}onuMb=mk5uyI0>21nwNB=GB84DSnkxmy;q73fHdGn*Mqa z?sB|=yJIE?LWaoOTuOT+oU!t(_BN4_!b&920WYV*nu?g{q!)@`#8d(gmhPAu(|TIZ zT4}Clx6*FxRyp>4iv7O)@98-;vIzdcf$<{L)gM0JwKH4{rZtWjhl7D4t!Te?7VXum z-NWZANU@!k2jldN@He!8r&7f<9Qh1RgWk1}b~S_2{(x8eHt#|++*G6o*oKS;XH$&Y z{D`EZpI)4TrU2u_$O^Ri=zeV9>9af%$2nE;ej4I()1&}gmA~fGGqj3SWFl$RxjO7G z@qc5V4MMoN7d$)(XNYMBcuc8z4aZ{oQ0~DwgoPoZAEQcMKzb!ctk@E6(f*E#I1WFF 
zuK^1Frh*`Q159_}C;~-<(jlZx&ZdRULb>gcCO=0Wnq zUAh?^30q)glN&KN>E(Ws4ffHPzH7$3fPT*MvC|0|;Oi2g&F#iaW@Xpo>?x_9Z<^Ik zafJ;wUUqXM&kpCa7t12`no4fo+bH%y@8Z{qvwom#gr7<8ZCE*EHz3M=a>E(~-;s|0 zWz8wQpun3zxI0Y=L#s?U$RRU|~kxfP~X>UnQ8E208MM(N>=0`_bO%Q;t+%;T$8LA3^PkM{Evf8m|Ne!VIQN6~1rMw7!>62YNK>{<2-ID4PI#KvLb zcpX22Yo6vojplSUtA_UTZGNdsZgUG);+l_Z8LsPbwd49BE`I(H`Y)V5VQ`6+Ewqt( zi#UIV=C5FjJlYQ*C_EV0# z*l~9*Ja*HdnlZl_T}uql-J!YM&B1m7h!cAj*dX_8T^K8yjJgWJLXMXIiSGFe)baSS z86+=0WqE2Ynb@@=gNYrG66-MJ=%`w}gua|SM2^%MCkM&T$+ut9%xLN2W+_k=6AO5L z9aw4KW)){49g|&W*Plof;tdHpR0ZcC_6bNl$ZX8+%!DbOh4g+v8?6`9>r&@RpC7He z5lk2|p;mRFOrzp67hVZ+Jqf;pF-9JfE0U|>u`zp@%#U*Y3L+$ zPT11{BkcJL?d_o&UX8zBfHQPauul6K9Bc|cOaSzQ+P|cK__b4$LHk_b zf^!)pYCB{W2%dOsA{dakgS=&C8G9>X2iJg=u(b@^M`%+E5m84=U1Iw@W)p!Z>gD(a zf(Wqcz%uHX&eg~sAoyR_tEO~^roppaUj=bo(DbtyP7;fujz+WQ(odb z|7|rH4qmO-R7f>RiBq8c|K&DaP2G6Kkb!0ln(6ZJxBQJ$IQm;#ipX%x9_2K9Ae0ex zWA-RSSN|A&+B6|~`~PKB?1ajUA;VCf*7+w)O|#RrCN8D=@^#6a<}1~2)9lcnxXhrS zeQQEQE(|w52KeMUcR6QJ#1>;po1J)vbV>M@P?R)pQUt)G$uiQYo0$cZcey@!Kk$57 z2?$>g1)v-XKr<8oF0RrFfD-_q3x@4fwv3mp(aWyPmOU4BIfru6Z6FMQD@Sw{5NVnS z`E=k2mrrR$ky3V#ri2k-!}dT?9MBYOlN{5~StuVw=C#-w#glcgB9is?v&tEg@lzOL zGj&7kI)w8edXaYggH`ca1rUy#WojK8Q9xR-g;RAQm)>&O(_7 z?wIZhNYSO!Dk>p!`DRwX_Wm1ON`r`KWg#iD)?DMTYBk_wL3M>eq`iA!Ej&tzUqPX! 
z8UR1GX{#uV(E0Lm;$BMrFG|+0lB*fcM2srf_8gMoZMw>Ye$3~+l{kn#DeX(}rR)Er znfj}zGr#uMFlR)pr8!|-5*ejO zEQ@e8^crp0G!Cc>Y4Hor4bFuS)eUj>zKyFUy z%K;f96Of>RoK^3py>&9qTjhgs-WX7KWI)ve6vh=FZ^yy$LmtO#eFt-gJo0PL96VRX zD%hdJ`(K1>+3mM^Ouyj97V+6BI zDF|sWN|S}5KDwO;R2&eF#;^mq3NpDs83%Iy5?TCOD_8NPA#yV~4xUG&h*0l(f;cBD z9#BM1yhAEctwXK#vVb$@DN(&nY$#Q0aKxId;z&;%eo;dkpAl4qz*6kn<&J7>yUhp` zT1x{3g5H?7#E)JgiAep){9GZ2Rl>WvMCmh9&&MHWNVJn}621`Zgf9p|VfTJaDqRij z@RZr-*j(o)7a>ag1WwXL8Mm385gSZSokxlW^gUi^m=l=58%m2XGMkIVcIqW)$8fcN z1OdXPe=l`KY-!#LuElorQ6b(sq81+lG}j=Y4@h9sEL;tidv{W1I` zg#pMS0r6|h0;*(=l%ODJ=&m?b8}a{S4z=0Y3?3cDeX@wC6tPptuT{U!I~Q=AQ2?g31ByvE3D)K7x*8h&;)I6hs0Eq#S(i%!T+>??xxkB$A#%9@|gY@r(#FIq3@#@AudHB8NG zpMpkjc!D>&1c;I*2GI~JGoURFDFhm)Uvgd`M z?zuSmW8*BC&Q{47R|6ybfS)t86`rgTXEIkmmK&8hC@u*{cwh&_UK-qOeTiR?n65G# z65l8I>v3?L$f8G-4{XZaA!74FL=6|J5RE-Hc->-GUiOuPmI4HAj@Z8*)q zKxfu1v}%ciaPE_^%m)w0Dz6?!Cc-OBn{KemuHE)~of9DWVxYpn07VLMs1!J3An1bt7T;$Ju?Sq4>oua($Qd18*-w;6^9b`$a`CaMg*TRNw^Wsup6%BG~al2Mn1F zBm;rHeli#7%4UPq$l}?Pxj6eghg}o=7(7dh9nrLKhv4Wd1Y^N_hA%T}wZ9|w!BKcP zo}WKLySBSt#ZfF9?QA>S1BweKGvKSkFkYPB4lRl1wKvKc1(L@bx)0 zWLHBrH6%se2V&ze(rH6~GyL4-po1OjwF-d50kbA75X{HYH|Cl$wi8Fp;)J~vKV23l z%H|Dvcq6r+lBWySXgwm;kK1rZia)BSU<;~7j@6`Gwf6NE>Rs9AB-gybB&$zyQaA>38#vZWn zkBskuYJ{C#$4w-@<{8Ngtp-aZXVCi9tfAc3`d-(2=k9_~4PN-Q%XKY{oTgjIl-ObZ zlKtXnfH@Nbh1KGt@Oh$LyvE1{=y{5~wn6n5eF2Cga~(vOjT|Q0cXW{Bf}zlk^msr} z1NQaT<69jM>kvt44$sDV(i_GTooA8O-cpJ&vT^inUnqJOR$% z@Fxw>kwosO=e>w{BTj-ch&9@K$r|jgB;p_6uSe=zsX>u7d#h z2)`k>T}ex94+xBpiJ}O7%dvv(#BgM%)!XS8eN|H~`fPOnFF!6$X`3twvEs?$WdK)! 
zj&#B&JFX)s{=6I=>Es<*OdVOyJMzb{q`@2dhPHZcA1#9|Lk6ymY!?}SDxl;WBL9ML zUprXR(lU$Uao57P7Jpmx*VSvDUvu#Txu2X#>wjq#y^>dXZ8^$_?5(_?-08;n+%i zdx6?!+0=o?^3jE(6-}|W$+FTuB80z#zybG|HqZ&Bk+~Sf7*pdpKkxILkM+-2dE>2S)?=x@T=Il9sF%uw_BRDK^KOA`w zvgzKXUM54piK`G-F|HC^Ww^?5(WemHxGHhg;982yi)#h0mAF>px)Ik+xI(zr;%def z#?^|eEx^}CvHu%B7jOhV9|{Aj4sUF5+qn1|vuwo1EXf%0aFHlOB+3wpW(Wi`v=Cj6 z-*pLy0W&jtp00(=aukB(Gsg`iM;hN_;bTl0~G$dtx7vi zgD|sKU4FUcD3X#QZ0gzIL@Op-fTHhT1u$p#@R7$gx&Bx^zC)l5z!`$5l#Azv>|uPs z$&PzuL~XTilZaVA!I2>d8AYd6+0`m3Ig#OBZC9c59HZ^4GPxP*t&r8&LI$rQrYMip zHj5R|PU7&?u7xJpFPKmKb=qgoa8A`fRZQ(o%VHjlNAva>_&~Gccn)Yrk*bn6=!1Dq zF}8LBuf3wP9-SwZQT%mwP4EB2X!X#kNnlei(4u4K&~afp6a!xu3s1wjD5^(Pv84by z8rFUw@){`2CjI>@D8rFz8uDKTWOf?J^id!@1npA-32z)hfm%&Fe+me4t!UXYy8n{K z699;y2OJN4>me2b)fzMHa=~s+E5s&{s5H7ja5QS20HXbWF6xj@kKu30` zXOuLd$Iu&RtCQ7%^@!mO^OduX^`@sBXauxU+JHQh%?NnWY@8gg75tow7@XCsA~K6? zJ2O6(^)pHovx)R3TS@vWKb449_G!oTZbwgEK+Vq$ZNhM<3pL}fS6jCg1ErwJ=+jnm zZ;XwJPMkh>v7YmhF*$3pIWI#_poU0STiT`>S6};A)t8fnqQO-F2)w>X4nhqBQqaqC z(cU)z0F9&p;$tJhI{kZb{G-V@a?{KTcniF|1wQSM5G8DSld(?wpJ|9_HY*@810CxZ zTs)gU_lYA4edGHb*$7U>o(E}g9S-iD9y9-)_=um0U8nu)PdV8!aC7m!AO1tkeEzp{MCg3lEFuc&FLy(=+y!I!cJ(H81zIcq(+PFwDshgUFdt((bRvk{^FkP3xf zpYgPIfD2F>-Cx2J4O@T@=ZQ)kXUkKVXDHg`9z>N;Xv(Jh-2$RVh*bzNVe}(WyQo+2 znJN0{Rv{+t$FLBq3&byB!XkphnQ*cW2V>jWox(Wc96{qK!>>X%>w9DcwvqN@`_H*> z@}pz8@SR6^_p4P{)Po;%V_9^%z8`#$8}Aj@P` ze2Ck{lIswCW0TYse9~i)u;V_ruoqX$X6%o_bqg-oL)_%n{udg}_NNcGPLGjCsh%`* zuIC8J`WW%(*u~6x4z=2&VMF+|zJf8gUhYPC&mEVtZyfj%OhO-gBZ_GT8e4T@M`B>I@ix$r^v0n5D< zT~~UP_D5(gs&KiDe;Eyw!ZEWVnD0NZuA1rcFR*esVa-{2wvILS)|rKkX(po{&oEg* zHs{B`A%Hg2L^By`BYkegB{bbZPtZ)Kj!BP>cobs`$8q z4WG%j4>(&(>F;so-mJLcFwecYv$brO5ZrJc#(!u>wqv|nr}sJ%Y#VQ@(A=tL^^<6_IetmRpjwPeDMe)l@qod7ReO4y^@A zYITKn2kwxLn9$T}H;4{$uRi$|X!ZYThp)*3SolQ%i**1GeH8$E7C_D@0Dn*Q5}4WpS`-AKZ;ze89k=^1_JZ2B z0Zz{DzEzR*5ZapUDexz$9DK0-i5rnPaROu?W>=*TtQ{Py{pkCH28CHmBj3VDvU~^# z9B>;*G~xyolWy3f!k+|s7)bxcXH^Vbi&T!-NB93>xQhM&|8$v+A3WssL8Z2G=$LPO 
z19S!@{>(J=tai@AWyf_c5@t*IGe^*F|74WbK{Lx6r+|G(D<&3Fz+UC5ift=7f59#E zWweX{V^_E?kNj`u-UT|!>T39(B$H%9CYeD87$r#5QBhE15-K0 zNhXMW`(Nu_>;JKMGSB5)_St8jefHUBpS|-ZMl*J0yzGsx>0{nr>eyQB*m}mq!vv~e zcMZ|`Y1w(M8hm|8>TA3LBvNx8a45A~zaBa9<3N7%!MoGNU5T^@=En;v@ zQzkMFThhcu*|B!61RdG1npJv((<@poVfBzZ3D6pG96c@c$-mZ}Zp_8W57J$=dgxv$ z)yb)}+R_@;q@Qr{!`|3(rUE0sgTC10;<%4hFG>>e!b;vq?w6MSaPt+IzR0l!ecnzM zNZQ})(>|7G;{aaH*mdk`p`XOvxjpphs7-US)_0qS1GN6O^+XyIS5ui^)vXd}v5of! zL^aKkkkW=C+EJ~dTDq{a2w?s&W|THe;~Dq=j%x+FZmw5O_YJnibKDxz`3@T$OViGeO(e0crko1kNU8G*k1 zWj!-&g(T3B0w<)7CoT@dOj#?&rjD&6Wk_OmI(k{ur#@i&lTHk04U&x|uK^pkl6nj=MW0MP4 zQo(;1LW*@h)up+Vfc$mx=}aADzKk$*WN>8#t=-nFOZePp%&lNBa25c@>#9+=hyXK_ z{r%F01|eRCNMW@e5H+fdZ}{r&Qz4NXXOdm}&NFFGQNeeEqKk1IE-1Z;B* z7gVb*BpGICV}7Y-ecFR@avZnoZyK;hy{eo*k&yxmo&iMXK*U~4En1OMhTQiz3k?WE zoL^e4Zq`p$oIHh>awNw}LQrnsWKU0Pg6rzRW0F6{G>Oj7_{-41xes$dD8v`eQjsqS z2I#tkkIiY9INHWBzMp6T$w&j=U}V5w@}5|4KDY9`8QTJNRbwJ!Fefx^xugAMp#<d<58dNS^_K`DdWtadBAqUjcd zhf?rTDW|({zdMN+KNH0Q{gi^2c@}00J2S<}co{VuFX1fHuwS^!hC>=Tycz`_Agj03 zB#~?93$=FmXBjyfUrQGU920w5%F-?o8fUJ@9~McNHI0$zNcNfHD6g||lMdq1IQ2sP%)t>lUTm%%+!a zRolgM444)@qJ8SLpliAD3}+0%FX-J+FX&w%YPkkEb2yPtylzOmKGpmy2me<1jn|C~ z#E;`@8p8a^OmyT)Oq78r+_TmMO=h4Ba|X)hL`%7lOGac4qj;HP9sGw)YU$P3MxBx2X#axbPvOIE{Jm|F zqBI|F0~q1Mo+KasJ>MD^!-voNKhgF(XiBusPOWcWyPej$%E=RPF(lbdtv$vqTk9HG{bkRd}6zob*eo#zcw z#N4v*m#n?@+!7KAGl**v$zWUz*j*|Nq5*>DCPRQ>r{5^PyIk-hg0Wqu)O&TJy@ROCv|u*Z+s2qyE!}>jzE6H=8ehn z?K4AUyu3Asi1oXAi1d1-&J?mPs8MI=RGO@%Tp{ZTNwVIL)S$%+&tWO*B=O=Yz9z(r z2ficam-TL^6&e>wV)`6Jxc;WeQ*(-SAYr78IPN%402(d^_KxEnL&QgVbBNqQ5BBEp z)~D88FVu!q5WGyr+oV&K`4ZKgT;@*RVI&skpzF*j5&Kh-^;hp68d9Hwh5Wy22;{)) ziHvYYQ=U&9Tam2)vDUa8{Ayl&Qh$Bv$I?vQUwcpPuZ#Jn`zs(upeOg2Lvotp;E{WE zf7PkEB&5Bue8Bdpri1z^oH2ExqkRReRB$jsmo*HptV@RDG%+iboB4$ctSJp=AT}T~ zsC0zcG&sPI@1(%FMRI(S%@pEYy(hJq9N_%n%OcSd5}sDiZ3|!mTWSO?i)3zdhNtrP zh6H!$NPIx{zVCqw%Mq>xKf|XMmnCEHUs^^6|Nl4tr5kZKWAr%n{DAAHT)*Ucl zC%K;Ca{L@Pat8mU<>c`HfBj#221QyE#b)xGm6M%glL`N%^>$*u|A=*4$s2V9+U4N! 
z^t4d_2FJ#KL2Hh!nYcvk7Kd3YhaxRnn=!JTnE%F+fg87W|{8p`HV54@Fv&D2y^52gAavANtVBn6wz zDPUSoRnn=!9Ge2>&T9to`3tLARo6=Lwb2=X5~3S+B3b#`r-u@Us;R|9q`rstzny=l|y=dCii-Sx-n&s1dGa*d5!u zd~D~kV|$loj!tR$i*pl5>$rK9oqaP<`dt-Es+OkM0lJd8S1;LqvkJlU1Lhw(^CI@CllY#h zjUMTklf@#cZRgst%a=K}b}h{m#p+=D6EFZZ3kmc^vXyTeU%I@~e@0|jhkee>${zN7 z(u3`!=Tki9=*2sKL{pB|RdlbqyQfwaVirXpW4}j^>vNWj2#)?q9M=!~3kd1Vp@u5_ z*u0<)Gm%0AVL2d?EG$`Gq-ugB*_}*6u4EFt{8W^+cUIQ~*|T&uv_aF=s^=ali#-ig zbatPbpcjC4>}H(4h9#t^Q9siuT*(w!i4@O0H?`Rr%wcoI+FKlFw#VY?Y%C}5U|A&U zzJ_ba?Nk4U+-S{Pbglw@ghaIJEA-+CV=}*5$Su>BDD@j2)HRlvd9F$18HB&Oh)w_V zOCZRdz>FUQCk_pIvcyaa97bE4ok9}HtztG6WSQ2b8Pz3UQl#lY+iiUx~;&{BWXu0F@9aO(;Ww^3vspF}xAz%U8 zs3@}dire=DF2ZF8_6Yd!E;fn`ROz?4u_yjJVLk7h^DxMZ*slt&zsnkY>!kh83Qo$+ zDfT|wd#%u1J;$*gDS!Q;dnRdgz!9vV#}a?oug%B({C-aFd*0noA4KZj<9y3)N*uiwijwiv7Q zoBp!q8Ss+!&IU{Q3`=7ft_&+p()OuCS_pE+PQy;cB<5LhcEyp4H>|(Ydw7H`Hma{qL z{xx^xp4rmVj>kvPTi$}Z0vE`R?cX`=)D(jai3bBh$Kr#BwLc&_L;bd3^|HmwiN3Y| zZ92BEb-?Ks!CRvgS1Kp*rUf!&+)j2!&1e@s}Q=~>I8-P3c@ zmMz{U$(Kj{wRP%mU=xm&;H#ml;u&hiO!)Ks(f}0fUrD*{^ zEZLv?gaNj;s4=Y;Ub2tz%s#F_pU2}TOuxb?X38nE=NPiL6d7s8#}fmhgU!RmXMcWR z68i!paHQEhMW@BP^)zBUdDviGe=%GHh-V4M+3u1P46N>NO)oj>C;hZb<*hw*BCR=3 zP6^eiYe-LRgY=J&O@dz&RhD?v@Oit|pSNttu>^Q?E%1i*cO&X>q5BCt?P!k z&rGMc$?MMzBbemDUT|}$&?p?oIMnft-FvK-(0|$NTn*o}s!j}*%1iHdF znloR`PC|q*QTW1D?l8d^IH{eC?6NXkRkW$9SmOQ8t?<=_=M{EW&P`VjKZ|J%OE2Qv zrzN7wV+J&_&i14jc>%unHSuj+r%EM23qY_}-Uw%hdyvMbk($FwNoYL_0OS88O|@Nw{T zK~Spa=74X3t-FTT=VC#C|VKZM8nrS=06jH7wiEM3%fD4OCw=u_#$&VAG z>{s8GjP{eM5qVBs#8L)-seaY!kS;4?vOcBzP_?N7{M%p3ny zoH#yptBa(7h-W`|>THT;EUzcWMQY|Z{cy~~u%uE4$Mbf_F zYLZL@WCpraE+=*S7dzUH!V65=42T=H0*2+NJV|BefYcPBiRP<}HANVE&nWZuGze*E zF-BZGC*`77HVIrJ7tJ4(i#qi?mK$~gl&N#{QqZm~EI7bBg{!Q06IIu$@346Q&j{UC zSRS}hf@kAaNPW{j9E5HGA>Wuj9E3AiROnSljjEW=FV-vd@Cgaqr4WGI^`NhQ- z?ePv`bJxT&HQJOo9;14Vj!BNT@5p@L?{&0k@BFiHW^&t8O~Nh@t7QNh;iwG9-9q5S z^l)4z-t0iOFH-skWe@S1cmiXHFoVFJ+r`p%q0C@qxS6Ytlx|ahza19<2l7i~ZMytC 
z@w0asdM<37i%fdc_RQ;wj7v)1*tGQO&<5v)5RP5;yB%#e(czrvy?6}2jeOtW8Z#PJ=vF*S(HdwZ7i8MVtTO@JS)PDf2;d~R2MNHZ zVT1-#y1py{x11uzDB_3vG~A2`C-dV5Hg5#>j9n=}yJ!P&cUNAWZM=-1$PqNKW8u{u zRmCmurd{;(rbHDBQmoivXLmsa1m&zE_6JP=pYSp**@^Xlgu4~s!V6z4uMmnZXV=we4&k~BW z1R5Ky4J{zARz*l6v3~6|75B2HQSUoMEsaOJngM z3o_rYNR9K_8n;p5mIc|Y4vuZAdMF=IF9g&JYgOzo)QCw@V(NLWO@sPZ8ld^r%@|SD zs_tFYK#mkexK+nWwVk=djA5HMOe5#6zLEZCR`Si^;;c({nQoq#&6XQ5^jl zdAfDGU^t2ITA3RENI@2Ly-kwZnH47m&-Sa2(TADSWj`!W&!AW86mEJqwlCpd0sC1k zLJ|L}4~7xypI|J5$u~jBfZ7DE6xnx@n?Xq~B3KpgyTpe?5n~ zcgFq91LA?Gvv&X(99gZI5L5v_`ONcla$Zy!B%iIbQ!M$IVn@T*| zaA9x+g}7*#B{r5>*QG`>RvK}-Bk~rc2g@m-xkgXpgr}IsEUHTyOhRF0b zN4?~cj0G%LmWS7P63xc~?j!MBE0_zMW6mNY8}{l}9e7Q*3j0OWoNE|s+zD#^%8$;o zk?^SM5!P@+Er{zKh$5B(k6oba8gBb2Iuq$HdI;Z*FAyios@97jLhWioxlCh@jSoW$ z3y0Ptj;$^0pOGD__6B~8uM}|#y8#UN)t*BFo&975&)8Qfa7dIIXoa5rYLsqDf!6PQ zkBrE`oOb-M5X@*ECBAug#_JYJ-*Zi&>par`>ONU_;7ON-ey=R18F#%GN%ggqq9vdL zSjo>bNmW(EtmI*V<<>n^I3(U}d}SC+4JJ&jKuUl3m5rjv3P-C|lc*1N+50W(@~aO_ zi>H&@csb?4Ba`Yhw^`n0W_h}ekCD;TVdc_BJzLUH4|L_0cKSZk&H!UfEae|&E~UL5 z0E>xdp!JZZyuU}!v2x;au&eG^!9<%FMDcQZ=Y=SKWwzsMhHC}SrtLFyY_d3nAX`|zD20VvGEW;5cThSG*NHnhYrDI8Lur#9%Gd)Nc37~weUy9`jbCi?KhgBBT!#(<-CDQJ0&VKRx4!?a|`P^^e zcNbSH_ZHj#JO9#b=>U_V0kZNpQ|_{C*}8N26Y6|JGp=NV=sNkGrR7=Xy zZPFjnZPE=Y}%wddtC|6q6kKgVo}NJcVrR;H~g<;gs{wN?&t*!9RL zeo@aadR1|*qwRYn#r(n1b|*LG`Q&u8b#RN^JPDPJOZetYr#d9dioVbZM*y!OTE9-P z5hOQs*rjWy*Qx^cAJ9lGMDg1S%XMtE2QTKcX%C6NJedyXBL5+3O!6(nExNg$ofL{Y zjAOyyEH@Sd>C946b?X`29W6N;%Y0IZtX4gR<{Eg48f^<})oEr?QpP$xAr$o>^ET4QyJvawEgkcVF@?8tpJy@rE?HBGXEd7+4xs*cI&DUWIR@n=dr_js zL|{<|*+EIFF-2`G$~)AU;}9I-BO<1T=d&APWoz%&@$wDb7(I89*Hp#AZS6H zBNREoLIaEPuiApPC!~YKZ(@Th_1R@$upfM-edp3Z0dCNl3Ym6cJnFv-kQCE6k1WQ4M&o)lbOt5%z) zPKa(r#$lFk6I2BvE=%*GtYD7u3{0*^-- z6oPkHX5dM%~I`HdhfEYw7O- zyWkd~Ky5-A1u9b_o)U%rpgw}FskocI#)*f#kp(xemguw`ijs+cN>t*j6mcxZg?}@G z3Z1^WrFn!UcqaXwV|*d^IG%<+Xs7@YKJ_gcO0N@c4{Gph3aM#4m-A9*Y6?MLK;QWY z!;QPfjiclpvj@Jm4tdOw%0+0<-o~D2p}J5gupikA*b(h6(vpdbj&Y|2Z8Xd+ycPkE 
z*1FlIdELW&$lC;YpOw~8S-gqnchB;S!gkMa?%EbRe&yVH5@FwUUP4Ky#Rn&og9m z!2``y4?-j@&E^)sAZ8Y-ch1n5A(V7+xDqXx6(2C=Ac;;bMbTEwtws_GwWK!~k*`Zd zF({!v(oVC;VZn>UZ;w-ST;Qu?o($~?yuJ{1IwyizpbK%#bMe*9>$5B!3yU{#BA$ED zQ|S^S;=if$M41VYn++0eZOwfIz z2@AIXs!CIltz|vu8433APxV(}i=1bm*fOXMN{ba6QS~F?rW6TPBUe{w+}cRKRg%M^ zz{q0g2diVjd5t>jA|606LBch5!83%dW;2SC2%ewZV_B$LAaBi_78awIWYe8qdsCC6 ztPV{zWg>nHC%x7cY1d(v`5bSoDn`nl*u^%7;_ z?$`mm*A6bS|4$Gqs#7gHHJ0jBHpW)67h;{d#4{-%zP$06*QqKHE?aAqaI=(v z3dJwIsPfEGjJ>A*g%gIZ&+1=xpb)2!G+oI2;>HmTD-wmw|8FT|M^6G2e{tw(LdUyW zKiMHHX*ejio2C2_h>}{IMYn|2xPmzfdY`QUgUu3p3WgbGm_3PX0_NRQ*Es{?_UD;r zgbmJ4PE$WOOM8ksbZJ#)X~J;pl$_T#*t>y>8MIzKXr-Xw`J7KpO}J`U!qd!zvGeyS zoI(&AV4>PD5`4___^Jt_rqY=dl<}iO~ zajUM;AzaL;@HG5{&XO~4g=)OuXw7G7rhgd5_q_>cN1g)l37WY_N*JV>ec&cVXLo9v zxyLNzdD<_%-I&+VmMFxenS0Da%&t$Nnc<+!H%r-=C`Ec*(~Pv?-_Xn$vVp5oG*8zg zvZm0?Q&##q>Hh%5AWGgp@uoJRkG3ZEn-bgawd#o|r41=JzrPC7s8zq)u4%!z$z7{1 zN0F`ZDthZF495RVZ|$QbX0%~@5M}z^px*k2)XmsNybaMN!DPmZiKfW-MbzrO0?r7? z65|RTpdGN~wpOJN2m8IRYOn`%fk;}R*Ma#ak+ce!&2D6s?K58dgPyjc>NQ3h2z1L~ zq$|*;!P5$aixG{n>tEFi#ZlPHC00v`uwVcXR&kyLjC3a*%gB)Ns1()*$In1l0 zMQ~Y|5L~S4L;6|f)#W17No-0j^)FNxFm7Rcj;-^G!}B~GxkFqDA+=GDpeN;cwXDbd zDjT@9JVziXb=}7Tf-_X+At+ss)svLw2)SCL@=Or`+?*!O@-)Z~@jJVQ;U*h7o01B_ z80^Wfeu2st5zWa?fb=5olIrK@s!@RUfxW@*QH?@B&CW~;CcyHfIvr0(+{C%yl>Zn3 zDb=a1dLnhJX{l)guaM13w&27%wH*}5NgEdoQI_;%Y9*6jjn(Ss0>Ejdp)rR*r; zTD}4?ejQaY^nf)^ab&?6X)+S^^v$MQwzicVw&`|l(q{5olw>lmxt7lAMhl zAGcVjV$lgc3Umew|Tx+k;7w^iK23>yjj`PDGnS67k|tuxNh?#km6qt~WgQIdO+r2~XtD|OZ*pY>ic^S`f1yK#nx5FR02-Gn zL(jpo#+8SWTL$(Y%mM`w>o*MBayE=Nw8UF?S}L$#3)_IHkl?R+bo*&IaxJ3am)TB( z1a!v~N==$xnIDz`wgs@+evh;coRCy7@v-Z0f!OlEeyvu#kDt(S_O&0OBynP5S5HcN z`h;h&TeLiIjqDZ8#`-Kco+jGZEGpNG$|lhjR+}aPCP@Gm(e>1i;8KpQGmBx&lH}Ud zB#nYXJ8!>)Y8jYmwJk4LAf=BnauX5Y4Eya7>+PX;_J?+k3uCZIQ7^=$C~Im_oSW;E zmmk`7NhSzJyqyNU_$KJGSkhAer9836r1$kQPY6o-PnYc{$HsTb0bcsv(sct}cH#x+Q}=+ZZZ$$Sse1JC-ejy-y)5}y@+I?0prDk3-cr9{hjZ|S z`rf}}WJzMy|CGrWxQy(H21omq*L=fVdOh%%>8SOFPg5SUX-pz$trTKQWa(D_%@*L` 
z2XZwJR`pXFnZs4YRm1f@xLeBabgpl5_4E8kel7a=S$c*wM;{Z*=B$`LCzd4XgnfCb z>9DFQq4um>J0_v`^s6iWAWgCl+Os+{H94BJxaqrZ3(g~(`2oIV(ksnQuhAc5?N;%U zMn<~~G1-C+`#gcCjJ4EJRVwWgy>%-Y7jyA1wae2|o`!9pv>w`oZgXfqzKtiN&2x|Z z-0Ar)KTyc;^)p;$meEyb{K<14uOo|2ibo$66Z3!7!nDLfr4ZI-`J-kNtB902K}BUY zyvDQ2c)6x~`qk-b&2FA?QL;;qMGuxV1#orgeo8gbb&RVz2yA+ng@zNSQngx|6a!I7D<9j7f zPkg#Ojy9lqByl+!b|A<3RHZK5W;j||!lYTc{+18>xsnsAAB8Um(aGL6?L&>}?(p=4 z$P@YmyQzRo#JAc-KOOocEn@x7dY7?kATnnl^tUUI-tkK53`>jgQ81m6mmhlP%9i6} zgX0VfahNWax;VHN$cZdS$6-Z0*eMa~qsuiv*45&&+Bl6S_#>{{aXK?&AhLRZsFp`t zbg7j2F=~?Cp?9xr`D83nk;sgJAjA+DkL&BdkMTYm7~S_XmcRHmxJHq*ivJGCv zW}9`SV^9}55f7}c{cTH~1dOe)N6H=(lFR6UeXSp9EYgjA z@5;7s30Oe20~6rAq4%zA`E+dXOk;2Bd%CJ!QdL)Mq^>P@Z}7Yc>tv(G|KrIz?(y~l|Rd~VI>^TAeM<8 zhvJT{@C8%S#3)`ADeRlSs=fa0zGe+QzHkc7kk7{>F z#_4`zwcT*tB{3=gu4&_wcpxvbV1&>{z-l}jdsj__g{62vVzgpc1Pv_na5i9W#uAUy zyRZ4I)=xj7{q&6A-f$2KuTlTC)r2)`)1tYbpI`gZ&v^UPMHHqKk=E>C=&~O$#g3os z3Y(5m8N|rHx|rhw*3fj1HWz%ZTD(%tbYez~J1Fq)R-gQYv5ROjL974Zj6{8pG*1ZT z?S~m4f?9j`&aznEm^lF-<@*WTGeL~6vzs|iBXMTzDc09`;;&JkUM20~)PT)GfyV~&1iGTk5;I+op z;vL@Vm%}Iai2W`|44=nSv2^MBn?$$5kgw!e_%fr$CJPW!e8cd0y57wy8QGZD`mQjV z%SQUt{ZJ~SV;K8XELu02YNe`;?moQt^R#7cPf< z4bnwuPYV625wT2+$!AZ}dSWMz&ER-(uZ?p)b@m3^l?m(EMlo}j_!?JLnIjelg#^hJ zdZ@E=lt-6QL>XpI*C}#3k~#6~Q1cBNcAL->Q`?djK!OCCKOFBF3Mb!duu;uo29muK) z?KsD$9@K-^hO#XzTvbm7uC4}y*o^wv#mKVRH-vOmkEK08C7r0^7oK3Th&A3afo;4Cz9qT23ZZ9EXNFKY=4SHg#Ab!3+pq|eC#^ePWhJOEyNE%>By1i$8?#lVU<%LXDz&XRcKqbJi2*q z4BsJKOrZKE}?Ek1>1A=YnIl8}a%Wmt* z5R61ic82=X9rtu`cB`&2-LEb%p&3eG)Y@w5#4w>;e{l|d*-rpDz1!he-=G(KI@K7R zs$eJ;JI2_k6POWi?O7#y&20+Em)TMh&9r&@0}gbsbz%$gQ&yZ1;?Vca*V5h_6W1O5 z{)B4;jXL@2=6#Gy27#8RLroTlxGyl%$P%Fq15OWv*}JdqRD#>D9@ZSl5Wy`} zvU2D}*7E50UFssVy0DBqqN;VParz+;lp;omlp?G>mgiTc8l_f-DbD`g$m9jG^XVrN zq^;|(S;ni?tQVV5dt?%dRCYG2WZ+j7 zsJ|Q$2n&p_FxRa26xI#XDX1D-?M3n_#F$(H=m@JwfTrYk%* zK}iT+gO=zf3o?157IUGzWx5r%(3K*@&K?qC=NEgs5Mrx5c+b|7^^CdFL3>mB2Zz)m zSXZkN4+#u5WAvu$lKmqQ48$T=eMPfWG$w;*>NIQCOo3g%w>3%GLeuQQ^O+mHoRzDx 
z1ty3lllo8hVGsDCx=4l(7K5URiIV9GdO$yG$qH=Ph`#BaBSrTk3_!Nge*$K(-1Bs8*;V-r=-PoM?H<~@Cw!`(JPIKAWUJNb)96&!0s z)y)u(K2|F7?Xo=WK|rL8P=99dd;}qL@DX3yB(hT_O^$A@GlQnY;6Wl$+Kp$`^SVRj zw7z-dJf#NOTrTi5$FOL5Od}TOvr>2LL&jI)`Z|Eiz1ey*zKrRJRrh&<6XQSYClm!E z-uzL8#fY+sFA zj_Cmq^4bAvvF%ti36#Is`?^V(0toBfS7@)5HcaHI;S%TnT3W|^2Lg|nQ{lfS@UO6R zoG~}DlmnorBJc|z93pT?iKAeibb$V;%$q|f$PpCSa-TQxfETPingX2vt_|U@GPUVB zkX>eTJ`G)&OK^70o|zG?w{x0IG$yA9$A_xi@sd4w|Gx)mYmD&%7HVgfbgHq5xmFaE zKh|@T_20F2wyjpnu8m_Eo^NY3r*5B_@`;cN!Jp0fp1J!3<68v!NhnEI9TtUM`wD(w zII(m5>Ox(LH8w^M`P+m=&9-rHwB!wS38QSWFLJF5wwsR9)0i#X(if>hT?VsE`0FL` zh32b_@6C3~9m!R|brF|zytv^?PY^V$z2Eo<*tSrsP_ytJ?C#c`3v9L{8n%A&32f}t zMKqOJVti=esBs@i?p5>s9b=b&@(uv(FS4_w{`7tf3V+&1F>exKZF#6es%{+v9lGrGjOvRstYJgHJ{z*KYw5M0ew z!?l#_$6T}}ORwQW%5f)Zqt$(M=mg zLI~CxU$z~4Mh|@)6@Y-=^%e3lc7-?!kQAebQXt{!6d7rC9w`~8l*y9$^wnbT^9X-+ zn2ZWH5~;YGLwC7=TkSKBsq?_Uv~wmCr{Hl=@3ydx8M9x<)2Q_eJv)Bg+yT#pZ{73{kS4HX-2xgH8ZeLIp zx^wNzHB=)=z!`7$clLcinnD)jC{QHmW3B9(7I3*K$d*aEz)bYLfeZN){1O~%62sX8 z?pq)&bjeO%qf3=C2584}CJ=X2z6;wdQspqUWr>$$40$u+R5zZcdQBdi$OQ=KP}Z%4=(ioy!|?%rpd-<_NCb)Ro0Gm!lBTrY3z=Dx50Kq; znw{v-lXg)w~L7T>%M}E{1vMnIH@nLcY_Iui}kVSjTla*VSBibD94VXGhXA zlGEdJR4nj1HvW?yU}p~2_R4YKl|!BA5}9hR5Qc^n2AA+f=F5qxDKXxR4r8%Q$PfHg z+rrlpGuj@8v^eq$i<^(}rwjisGDoG9)CZC2lIol_5=FjNiN9VVsu7XY8pi1`iaEUu z3bTA6zI&aM;VdQ19x4g)K?ce)aTJ=C8=QyD9V%qM&9HCQQ^ly@6(aB$QDmoO0_hmh zcS+=F8;UN2X*hWn^`)@Wq;_(1l)4r-G?e{1^A-gZ`Wr{7d_C!F!&wj|7;)182*|Du zo873J2>B&HRX66Ty(ypA&P;x)Y0OuDPWd#fKu2PI0U+RPSba)KCzFMaN%MgZtcy&pt?rS zy9{mApK7ww!!PR1v;1B*3=T(oBbTx?1~yy!(e*j%k;^O=Z_)Z}^#C_Uk=i0Z)<;o} zU_?2Ydn^*XhlAPM3ehjc=1;}%%~N+klAceK798_Q&pi{gky##)d1E_0f= zM5?1iH~@C?dA0ISovDwCxeOiIVVmXA8J6h%E$_|1G~VCnT1Gh*ww7X9-tMMsqkD^d zL8YZ0qnP&2`uZ)myi7yRYP9;pljWPbZ`g;-#xoZ!i)c-Cn@TTPeSjHzbQTi)lz?^VQpWr2mb@sY}qWr{Y5~0lck3*Bm0cj z1&XA7v?$z!cKeOlwV}#YKbq0qJ;SZ=GYNmCv?N zXQ9tbu2bvAdb?JiRj%t9)4IYQ-w=;a?4nJ&kY=E*kD9rxRH9v1g^r72&(?HWBDn@@ z!DmteP03EL^?5;E;I!$X;}@^~C^F|Cq2nXh*&Flld}@8ZjZe|cS#)?~e!X{xDC9z& 
zwx}v7kvfe09it#YqZ)yOoqdyZ5k88F;OvbIMufEvGo<3aB;;$W!0c1&FCe?Ah?r zfhpsIg{pb5(8d#y%06F%Z~#?yjTCK=lmopG(K8w}H1_M&l)Z^|E{NrZe)bK6GIGO`TIRvT?m! z_}C1~(F)IoV5MJuKc$enQVO~Kq(WAIP9ZIU0(I$0jqy#=eH5B^Fg|tZ!5}v0=fDJn zq?mByK|AJ1UQJ6N`A2*~&vG(`FRM&oc#1S!k7R$zQE0nBb@4`1wTVCV?nGUi>^rk7 z96ErrJYf^bf(PWG;YF7L76cx{7RC;t-F ztx4u*-r5;8=GnY2241J%O7wf;}zvo zWq&NYr?TJTWbm_h{FDrt`S#X##4Iy#DZ&g>!34*hzvjJnukq>Xbet~7g9Gs|sEzrA z2yM2eO@q27zMa-I5WCj9w14%r_~gp46Q90mv(!!48ugO8W`uf0y z@yY>UpG$zU%Ki-kz6i=Jg4?G{g~ktnr~+(dm_^dXy`qVd{cQO7Go^*$yjjD zgs|aK_P~WxgU$dR;-&^&Lgd{Hh!~3!6T`Z*l>MSaud4IO#$a=)D007WD()Ge(2AQkBwJ{0SgJjW#u6E>5&o$d|oiqokDs)^d9Pm`VcqYHIyxzX4RMp!6txnE7k z>5ZS@;yJZ4*4A9bUXv=)_+!3q%A_P#wD^hZ35@WmgYcL7aC1Sp*=}IJwphl$^IjSM zOoB2Fgjd;0s9f6H1Bd<;z|8h)I8=cNfpBQM4DtY;t?a^1FQ5b_f^J0X`fw9{c_~=0 zPhm>Z-q@=Y?^n6Hwd$M&Ou6`LRJQ_ed~zStiOPm-we+I{^F|w1A+y9s*_rSlxzP

?Uawz_a`O4PMw>e43p6*%&2;EaX7oOZ;YdO0l-_OiB0;VG zFcH`nPVHc#8)@W2s_F5U%fR7mMOC6^63A#rbM#>}bX zo|6_dhsW*Ux1-YeQ<19RQNhLW$u~=>MUV}&5TD#2Pv`JdkBATs9+=v6FmQh811q=Q z;K9{n31A%PGp4Dd;FT?fzt*XLa#tVU9ecfD6>ZpdgS54n(Ny1Xi#!M9`5bvp@@Awr zOQ?yZ^MtU5OZp#?rCx^N9;N^Qmd^njA126$sA{B9Q5r1dXxH8{^yJ8%i+*7Nwfcn| zQMag%^jK!{oSVb>#+-&QVc_z;d3}EtdCthQ^V>dW zRWMtne-S)q!JxgF&Oh-{^g=`5ZEDMPe8;C;mF;N7s`Q{MG-p*hRiwp67&c0-K%H#m zc<<9zi=J$Rdqw8;sV{>7qp~lw?O|}6);wAZBXUYnaB8Jau*2DjmL0;==Gh#NTj!i7 zpIe#+EQGb1*B`om)xUN^)oM4yhBZG!!^k@T*d(2qFX+HPUR<~TaqTLKNY$+oT~b|MD?RjUA=t%5384?|E_w= z@aoOpE)=YPfBC6r{!DKdChJY~wyF@Fq^!h*^gg0^NUs+I_c-KVEeABPFqv9u zV=sK3ofyj!yU-HIAqsV<(q;{x>D?R1>=?c2^qKK&gpQVJ=LAoa^!?qH@pByabXJKV zRT|H3e}iOMn=)qdrxNS?PG6O}@cIOwP5E5a(iCU>*aM{^9|eESV__hRzd6Azlws|j z8MlOP?6ddDBye1SE;-tT-L}*jut%!WTBe*6v@%!bv0B?hYiJ1#d8s#+dzFSFV+rlW zr|L%?S#6zx=~L(R2W`MHr7R~DHzjUlq^Rzp-pyW$+Zw zuIl|NxHb#5P$+Hh9nTv8*P%HxLWP!i+D_pH(iO9 zxCx*l{xb5ChZ|``@HCXLH__nwZE+Iy2Eku=8fuF|68IaL_=CARLjlh*qBXFpJRrKu z$~Q&hSh?3|wqw+`fp$o-lFA4k2%Sg|WVOC1dJtpYn+8@gcv#(=@pM=VHe2oy?3l96 zsQcu;e6wK61{jpi!h7xwj615fH5RBB=#+ZpXVTO!oa)c|#Y#{Yx^cjw)|@Y22=hKx z{R!=3eSDM~Aht+;i zy8|#FN3jM4)sL4-gEkM_ZlOfik~>qY;+mNN_J)RWgHKE6c6>qOI|GL76slr z>>#xU3(k|Dz{~Q(IsMkZQFKdiI~xj6Gq&4TL<3)dxqNuVdw~nu4hG)uQchlt;?yc&$M|Jh4hu+Ny9k;E%=!?bn+)cC7L){sn9$or#gQc@)J(yOfN(*(T<9t|P`1xR}&1M?0I0eEU!sFRiZyNkc z88PbI%e9+p2UjQ8Hm*mx9^^7+-E}ZL>+Xa6Dt;Behxk3jZy&#$@ww{=zeo7(=eM8V z0e%M})(D8P(8K)+bQp1Iw4KY%HHiyGzHIr<=w@kg>b=?e#2a87h7r(_OdAPYHV|1q zbs*6Ro}o_2``6*04N&jq)2JnWY#*4e;FhwIeRACFlZnzN_W%niB=P14xZlCm$JJ4l zaqdf-W@lizE=7N+vG?1Qq6M}@nxVe>fRwryHl?K8oRW?f(85HYeUK6QPcbs*nr-J^~&8ybaxTz{%#Da@3QlY z3EN$4AKZ;q?Ohle-EAG-HztmLLU|8zZR2`Q%1-u8RR$PH=?nc#Yg77xwvonsH>~f> zWdbsPt+CW@YNqi0*N+M?aQ4(DSD?WABFd@9*wh$OW7MhF+#<-iN;*yDRM71`7RXB| zr<_wOU2D%K=4>Fhmg9V12uM;p37$wvu~&NK$px3x%3# z0<%oLRk`EN$&CA?-pboF;Mll}RCVz$hzcv%ucog_=&;h7`X|1P*5<;d0i&s}_g+$Y zmmXQ&5NbNYsgWn>%?%}cy!+PX7+uGLM~tpmwrI1C7}1GO$8xvHXYXEgS5Yt8s*T?w zHyYc1zk1AQF4u;I((+|ilV&aXs8K2uS0a0A~%1$TGym5VJk_L7d@79 
zD9zs+AgxX*3oX`PB<)Eg%^<0ntfbQ?|9WRj;uP(fv-e8#jWc$r(cZ91#E<=KOqCoB z2M>zk>~hp*f#Rt_cX0A>aQ-V%$B)@HNY$lvjutSX3iUeWpi6VBb;#nTF3mkmoz{-< zCEJ}tI&V=gi8@VZ`;}x9wU9G;akM0W#a=B~B450UG3%`b+tdCuU0nsmQYsL*T?8)t*f4nsoi8<=<`wsqe6w+GXM?;;#G z+k)AC)dXSH8(H2e+*nO*wgz`pgtOxL(oiESkTb;{$mT`tx0~>-Z;w}5LkB)&yF+}o zN6!gHIe*1`q@aJtL!Q+8T9sw)Oa7S_09@gmWjQUv1%Q4HjZ8vE;6@PzUMNL(H*M@k07Y)2B`w>p_ey6B1UCC{iKTbIVYPw_)cy%tfhDCi%z<^$HPoM$UI~Snnm1PRJOQN5r=bh+>ub4w|YMok?b_!T7`aQ+Mi37-{^E*^fx>Ev0V4tfM4zoR|ps?&%r<;Jo*JYWAyY z&WxSB>+&VhY>5UPDqL#b>s^;>?o)SNv})}0c3nIvg}74+v7c1PKrxk_q7d12c>>^4 zp@01}6Z*ui%Ng}px95zmy9?p+}`GYr7SPIG56D0Ets^O^CAMfH=3pC?w=o7Sw zPgY~8t^JK)zVyz>uamv^IaoZhA4?uPuFQC-?|q!Vq4HVCdc@%U77wDnJQzB0z_GD| zr$(##c)ZlyG=SG(1V3OnKz!V`A5p(|@+9)DyGlZm{CJHtfU^RDO!fg|lhNX>)u|7n zWWT_eIWWiD&5lU7>>;tu$l9V-t))DQlYl1Nu^WlrGdKqwe^$xSs&LRA>PGNHAIcOO zpIrEo=)7gk1{Nq2kAcI>JV(lm2M?&LbzykgHR96)li|hI!+c_Bx|;Q zlywNkOKmjzyt4p4YBU{Wr>FfXN|;*i0f4|Hzp9+YC$ZPs!!TBT9Dz ziDEct%By;K$#TgXBCpzICJHo|XO&`K(~$T-0|0Sj{8KY^MWufA=1g5tU_8CC_IX^N z$+Ox(S!OXz2|ehHv8m-%?|Q}#`A2J?U_-!+;#)YY9%oK_U|`N~LmKnQ1jXPUE|}=~ z;7po4LYge2(y#X4%q?i9H*@3wn>-F`l5;ga4_bnkGiGBW$k|`O_b4mnBsygY%u02% z+H(MgGKiQkPr#x^3e*V;BH16uM66?^*u}K%34FEVYiVsq0}G8~d~tPTBf($sbYKii zoV<=K^I=3=XYep5B3ToeueyW1k>+^E_312tCdjMwE4Bxp?3kWGE_cVuI3bCy*dBO0 z_7Iy0i51 zA~I$cmP<1r@0lK6oL0-;L7p+QfK%7ebu#v_^T9-3tt12PDz38{U$@Cw(pjD*SZ;@Q zTZ}J?8=LqSUB+LRntEs#qcUiV%sZrRKrCV_k-@_zSx+E6T*Mfjq+We%_&(iHW&~J! zi)hqX_Z$hWSoN=x&feExGa?k*Lrt#q^*j0N*}&H9q1ukK;5w1!w9pd|)6lf$F_Fqc zKF3zJu6*ie>~NAsH0(~}8=_(7gJY|8M(6-aG8BvBXUBS4KD{`Y6`JQxk7c%eIx(2R zON2p&?wwarC+%{yRp=X!r~dVMM3Lqsp2;RPC_il=guz2v+oo0ytiB*<^*)QxKhJH|J2&SN;pEx} zZNWmlQIo+=wT#x-5@YFr8zCRv6TIo(R<=Fsu8i>c79hGiR`cYJM$M$A)huJ%>6a%O9@D!4!2QrQ= z9T?Vq>^R5V$uvd7KH-mH`7*7fME=N>Ee_;-krW`BV0J@4+Z?eppZbj{vzjzz?u4+v zbaF!W+|GHBgmRreY?62(C zi$=X&WE!))#_R;_=2$Jnm>i=NwrVRVN()S;^r5bY(7}m_s`FUwA}_w_4_7Lt<)pz! zr1F5Thh0i2s;BZ0Iw6Z%s~x{MdlPB?YNnZD>P~ZPv3k~gz)8~Wg^a?X0tfr! 
zo516*B?iwR#$*N#=7KXvyR55Y7wFk_lk}1_b|QUn>2SPVA9_!)CiAjRMvBWrF|d|w zh}pA&(aAy_ZQrHjM1TL?M1H{hNVcO%R1YHf$Nv_=lR)rU2?X1R5FB&M32&z9F|mB^ zsE|8r$sS!QXQOG09ZyZPOFA)lltwt(*FoP&s7>_^vLw8vPu+n8C8Ng{oT$?G4j(<| zW+ynxfDFZc;V4?e5nIY=3+#r;$l0o0K~Xxu_tcMsP1MIq;;U?I*RuRZ!@wU~gJ@S<_g7TDIgvk5{Qo6yX0PzS&>`2p zo%o5Av+UGpSF!?xw?sLpWmoPy82>wbKajdx%RC!o@8=ch4Ar-#Z-boWLiNm@qwR-e zQ@wm5Vi32Cb$NS~+m35Ij+S>?9PPEt*84tE8%Wf0JS~{M??9;hpzL%x+H6=C-B+SEqppU%QM;*%$Oc4d8=e+(T87|&IQ z@ZbQFID7De{a>@GhXlUu&ieh=+xDl0wg*J>RY`HW{@xiTS=y5o+8pi2VC?(%Qk#pH z@eKk+pj@pK^xW)mbp9jz*euVO79Inm-?+GlnWgRd00`24@3pnpz7N`-ceK4j)At?d zeVrc;`>WoUNga38Q|3eGk7o zxcXN**6c1_>1gg;=~$4tTYvau_e#gh&;C*ZZ3eF?C%B?zebY+E;b+T>trp9M+?9^k zFj&HMhBdmEVV6fOFgryjPeO3`S!&+9((!Z` zvHOrD)}LKo%qo1tqXNTbVEEXv1mFg%+4li8Jw2|IFKH_setkpIagS}g3I+_sN14svPHul0~Pn%>gCQ#Lz~$lplO>o zxRT#um-p=luo>;ioWnPcWm)c*k6Q-d4LzBC{LQgiI(ssYa0d(FE65GE? zxA9SPSio+npa`}^cfv{QVYj1Z@v`9ny%!%V zU}6fvZE-zE4=#)ltO^mzQ!-1L;h|;r2}c6OJ$hE#Z4x$%U%uhKyW3?#H6DAIVj}JGH{5<7Z+a^CO6$Xq zZI;g!1C73L`<>iIDqC1Bs-uhbZMbrGq;fBFM^9x7%(^oiyfa++irIYDgDl*o%}BMh zN1Dv)L6k?f#&q>NMo4{wWE$3DBO(ndd48Gt;tRC(nFo2&ln|dRSWx9Wiy>Xc()a;N z`LdLGb3O)^wyh9qM@X?Ev3hsm8q0=Q;_`mFZmeqGJxt^I3p$F~#psS*h79B8 zFo$*18N{KIjl}dBRcwrATpK*>{WR#@bcVW0@}5&4#%Lwhsdrd7s#9_l^|xL96W(d` zx5w*)7fj7`1LfNBZ2n&(Ex$uSzFzaC#C2lXZ}bIsO7L}^U(doWtwjHoOacAi5+%%k zSWeeqIZ1`QDnEo6BCym4FPi28m1Xp;oVqN_@pzRhvcAZ@_T0LL!kP5au`lahst@GzDQjk^XM%U{ z>i1$+F|$F;(Mt_~eDeP#?rq?ss?J64NoEozpEE#^fPhhhVgoH2XbD3k2opdFCWa(L z3D6!$)2J=N-hfIVaVIpB?NrXGZME7~4z}0c+8%mp6~7aL2|=nLRzQdbOLdP!F$#tN zF?0XV+LHj*bMAfL_x^sbnAv-;@Aa%_J!`FJJs;_em^rl&Kti`v0ujzW<_HQA|+P&in7Hjf_}C?-2ZpRvQ^1y;VXZm&5E5j8=jp$TvoNJYumV zXhZIG02r46j0uu*7JWr#(@2q+$W{T604tTrwS-JYiUL`vBmuT7W~zi$_wQUJTm&>& za%}e7^Or0~gdsaJH?R?IhV~81Wq0Qq1x5I)=2(GaK`6i}vS3ZT+S z4WQ^+x}2>)kxyh=ssu<2U9b?vF#XrVgl6|p(+a|u3CNKjr5aU>yV`60@hwtFc(uE# zP@uVpI}*kC^wfVWWHCRJDv)&fo7`&Pyx0EvtNk^_{%W_sX4qfP+h23+uix2U^X#u* z*k4O>WpJd}2PFV?zCQ{~gjHj1tb#u=nkL^xlL4xyE+U}UhLl;$B=G=9%V-_ITrU|3 
zV~ogq7kRHPv+n#RfxcWN<(gI(NoQ1Goy~OU6cU(XXAo$sogAi|rCjS*!ySUfbe{Ii zXTt$+oA+cZSG|{AgkwT|^aQ2I3Y|nee5&A5)p~HKEb>m(UGm&EhGF7kx%!Z4z{g_s zAp;nB$H_p%2BG~Z5>L4liHq$llytcu zv3Nzf6p6E&^TI={zmg2L^fKYoAX$l?0Y}ZWGTqC1<`tF=vXju;idNq#j(;`DY9+iM zM->;{{rv(tdV3}OD|uN4$A$NpC0^eF^|~L9Dz;g4Pg7!}JMSiBl*X|xhDQy4x^LCu zA_ZysNy!}Bn9h=EN*>+A?5{3tToNBy99j=V8^>8OQgd`2eV}?#)q0s%eH>nu)O*nX zI5cE~jH7+|Qa!CYV`-_ip9Yqf%e9fHKyDLV#K6=A{018xAYk~qwmg@^aN~z8E-|p) z-IkZ>Xv-U)*p_!qqR~9fOpy$AEs_-R!vmE$HE($8zmQ4c-zSBBCa=?iixKefK_Yj^ z8Nk=O0sR~9k;1o0Bdmq035-TTF!|4_0Wn6LG$A9nMYZ9Yi`rl;cROmAXVzZah@o{m zRReaeekVUCS8{In4d$%Wo3XMBm7tAxbJZ@k=^|RGakgJ8YH~gmqum0>HmN8wVm(ly z`Wc7@nr+-Q8XsP9XU{a}6k9WsjE+Ejle013RrQ32%~?M7sgti`qRLfMeTQItzB}UY znr--xySQ-L@MG>&Pe}<=r8nZR8QBnS)wTHAg|6E0>b87W9emKZ>Yl~OvA50>`Aehu zbH+!bCCdJ#HXD*M6LJ)7+myhK$dq{NoZLR!sN{G~?KmBm&scgGI&WlOEifKPVFae!%Qt4QfM@-(cC|AED; z(E82>l9kI?q72Mmbz0R90pY#u#MRAhpyntYxKTPFYAA@Ebor(V5IsSIDpco#uI)L~ zw53rXmeT%j?~)0M40NERFABfym_hAnF|=>wl(==QF&5G;1?X`AIy@?;#;vFIw8Y97 zOpQkA&IJEaYPFbuj{IuYZbDX9H`o9%W z$jcQ{uG(k6IvFx;%=ml>#5YDC7j%8Tn76<*g_V)XMti#T{`lygH96nt&)o19IL9X4 z$W$o>3qLJ3#|k!-z9M-UK9GO7fmWWDQWiN-W_7}}F|J{yZgbc5uYZ>LeN?4frj&yc zB&F^wj2yTS{~@|!kZsuJktwf9p^*_TDyR_Ig)SMDzriMuZCcH7sQOsB!A9O<7A%(P zan{el6OMB%B{k|Tyia+6u2?WKH)Q1nuD8-0p!1*#yuowf&4Yhw^sAoD}6qcx-QEV;ZvX-&}L-gUn z{Iu#=G=2i{ivwZ}%*7q%%)mx>;Ksnxbi`56JC7ZT-WGRHdG`EJg1y?cN`+9(C9gJ$ zYf={Dy2|7T*1e6&bO)nuRJvvQ^~^$fFW$HpJ%P-~eAoIUb7*611pm#=;ba8(;m|Ik ze1tl<{&gnAw?^Ux0YrsvI0CgzAVNX1^&C(#Zj4Y)O|oOtN0>tCnVX!);@)i@Mu}mQ z+o6A*Q%!PAe0WlmGa{Bf-n1Sd(`<6-!>wPyUoobAP4}8_Zl7s=PfEkuiDT0NK{vPv zZisE&iSU1C1p^GbcoBLr0sY3X&9`&t72 z2R%Q$k6g&r{%OuE^hUcP0liVd7gz&mfc{cwF)1j|urjn84a=!bGAy@t#k_giFuptu zIYb);mOC`resP7awO?XF<9MknuxbHyXqX)s8%p4%_T1OF5^i@fxaOulUw@#c*%R#M zR7OFrGZoFDx<~y4cJ+JHu&T;Px_?ts-5(o-}+IdO|V=pP8%M<*ADxoV9`nAg5 zb#eBt&=!?#zLsBmu1cF$d+si6MD4kCo`&^=(@NQah3=9xzPohqYzYB5fNZFYKpN%^ zEwo=ULVjMFoHM?ukF32vW{N&^wn|PvCHVv*XJo6W8&uRvE3yeG5~5NhYK5mhPM+Um zL&&~;#t%I8pUYGC)c-@C>pk^?Ji8O#$V=8=5Pg7JHu*m)*RUnN 
zFR1Ue&axqIa?Y-A7CfkLAyYJ>i-^!8e9QRtuJAmxnn~xW5A$pE7JDB09V4UNZdkK4 zFXNfVpL^asPs2jk&$T-<+Wn0rbbxB~J}*b_3Rav@20;Q9M+5#xJi&IN+x=a1#t}+j z##Wy3Ta(?o9))K3u4urJXnJtF)Xy!%YBbB zesUQyD^h4UrD<+oqyL2BJgSt`87ibOtc2K-%hHC@{X*f@!8QC(XFk)_sJ*cf17$w$arJ*_3u*?|6tc}T>yRpRU( z0UugMwcLUS{PpUHr8k0uh`Iq%geo3^sap(_Nwi+9qc9LTshTwwh8gNUs--?{H0Y*} zketPlsiOU2J!_{bFR!in=)3wT&}1N5)MTum4MzJW=}7`G+BzFdEe@>)!Zb}l87O!r ze5GNn@zk$^uMO(d|4qOzbgZ282r*qtswnK4{)|`$37Y)RxQg2RyC*mLJKFub7z#(| zkAx!?G$ zX&XSYHK}I~aaD2V-Dsl~qHQtX#UVy49v#tDszJKP9+#oUO6!c8g98I(wkNgf&MW=9 zf`682jbKBhpsS&IQ@liInWq2Hqqh}e!0RIsc5In!oXrVTJc2#FcQ-nn1ZoqNvnzzwmLr}DJ%N@ye=kWLB@%4f^*Mb29dwkrkfn>0M@xgar$BTelEKI(Io^ z;I~}5r>wYoGI3U_o!F>(iOLUYMGegx3rt;6u;=0<1&qty^{sT%`}l z?Pt=DOZGHObTcU1C_Kdcuy97ea0+@Gk2EbHvS&u?_vI4=dYEx zy;YO_D5*rI0%f@c_Trq_8{{vm3++s`N4wtNm;G8{fW=GiMP@2`1VVolDS#D16S(sz z=w1=v6bGGK2Ezw2yCS*u<5FGu$Hpw@8h7?oxvwgd!+w9;oXp|Mk1lO=1n>81DR`uH zzL}jo&HCsWm{M5Q%y--02jv_4xIKAXq&8eZ2J_Bj+@~!K z&bb?FbDPr|I)Y0!drc>iX$TM=wDz*}H8e}LF&pM=T0ntmG{dy?zu-1@I2&fwtc#kl z{7cO!w40GG;`(eUTifT@PME@NG-N_u&cCd=yG#F?kvk85hC{ouN}kqTa4rq8kEqFQ z3Z=J%4j(aN6r$bgQ zP;oj?!G%kAI3~aL#tiKoipbyAa%Qa7CeB!=U17ZjPF*Z|_2F^BWHdWM^C5!m{^S_e zm#`N>yqG{mayW?)abm=GY<9=2{HFNnt!2nj5fm)i^r|C|<^6R}eHPywP1F|T-$4c2 z3pyK2c1DuhO43T_;e^Uc*S=v`iP}|$EsBnHmpUmPfFG1%+n@I`z|xAcu&JeL_#!^9nJcjgqq~=%{djP_33ph`y#K( zk|T4|5uPf6;}dF_LUurAG|SMs0%b=mJX{5M@nzEyX0Mbi8&3s0PlGte$f@Yo)(s30 z#@4D%vx&FrIj=%gpi(Ok;i~E*43B5X!pb351kvN9B!|lp_Eov&NaQ;gMY>gFtRp{jJOAWXG^^>d)kY2&of)(fE9#i(#Zcn`bMD4>>Zir|sI+ZGurw5F)7RvI%v z4$v7$utwCO*+k0;#4hL#EOrIb1B=~(#bW~V#MU6vLEb9I&*;~8gu8r1F^1r$OyDdh zytPZXLhXQckSmm~m6}`^CqHHA`IQaL>t_Y?zd+7)-7qn?XpP6Nt71p~cpy(?3VfZ+ z|0LLEt6^bJG3Ky7X5FKL^TR(zu>L!+@2aO+0bxm)Dav-8D^=C1_<8w?eSsTzSa#NB zFuH_?p`!KS!neB#Gd6>KI|NP#n0t-^=AJnt9wv+LymndLcNH20mbn7U+<_|s%f=YD zdIxljEm$0ALRwp}ASA;5z+?tI0P(B?rG~zsI*5}OEP-qrYGCQN|SGT)*b>eYW=N!6|ElOaA0FlNcxCewRbvTP3y< zu%vGF-|TQaFBmN!-dn)z?JFLFx(FIVAe9M6*O^%^IS%ZQbXqp`Vd})4z1te{A3}ku 
z(j%*qBdffTRcY3Z&?NA;LZzV_f!H0;5`+%ui`%>|a?hB^J?W8qCRjqeBq5hUSa~lt zKSq=3NcLNtXd{vR2<%q0njd4=p#{j?K@(MM2l0`}htNELB;u}BZvvMh>(57Iwd86- z*`&X%NL_Hp}s8S$Rl(?`9 zB??`!mA}QR7l!7Sx|j#ZsF(bKb_jP$OSa~#ci~L2uSm8IeOFCl(H)!R10{uhKH6fj(1?9QM*`We0#oh-QIY zMOF6^cw<+WQ`3iBsIZ!;&kbKvvGcLzJMl2d2N_3ce3%WH{K7HH`SS!` z0{+JX`V*W&^dT%w8X?&Dy4D&E(gh}44j#fELS6DQ;9y<(-oQYH=1W^lo9Jy5fyvEX-FoXGEGn?b4h%ziPsagFsK3uKiQg_($X2zl3I!m!XB5}D#OKaGso0ws|nU^593Oh~M)`JKa4|DSwCMm$KIv2we^ zcv%|BCFV^lr${~^mh4%8pdYI8P76{4B2`B6fvW9`&;UzECz@cfAXN!iI( z#|4AQWWXPHu;A$;u%TX}Xj^1NBdwO3Nl*Q)QW8h^?S-Ym21#yrJ2Ij^+UfBmFgeBh zpYXML9?am&sA;78vdG&RDSsemn|etBZWjtT&i;i{NsQ;gzws9FKN9gj5neGP1}iMF z`sJSIQnTNxG|mE$o%#8O|H;@u!P6pYRMHqtk|X>0KQV-rKL69@o`#22DNjf%w6WOY zEaYr#{rEz>X2(;JsQ{Maf70{7y-=RezYt@BXJ7}zK3=~3k7xJDaKh;GH2f=F8bI^y zp3N&ILBVzz!>oeGgZ^iH3K7>_YnE(}__sF=n^k)b#}!o6=19o{w2fZ*t*d#Gy?|r& zezTiAS=|Dphk>sDnJjUS58_S+k`w;FY(!e!N!YiYZiRwiK3Dg}CdDB6P2> zU%S@^HwFva3WN1iC0C*8-(os0!bvT`sXy%N-}H7k!4^dUgZP|l`kx$xe!%~P=Yen1 zyYOU6EO@vmqUc{le>^gI0Jk;vvk%MZlEfDkW%{3BuUHDSMGyFCEu%VaDNihxcKvo> zwG!3er>!2o%fr{&lqgS%d%SkqXmuAd+WM19%tesHdXd+_WTD&6EdL|R?l6NwnnjRq z6oPJbuch;)_H`MuuWQ0W6XcyUjefaROI9NTn=Gm{wNSE13Ob28)&{pv-E*(L(khcW zrdhdz>1^wTOAjfk8tyQ~h;MLAP!a2H`-0lUF8hM(UBjPc`)5MJ8z{Q`mP;Ofj=>*!&ZtEE-)xJQ&uJTWW zxgysH1^9;CDs9o@d-AjxJqeRK`k0Y)YbTZk2#%4!Kt>Yq=VEFQ$LgUs+Z>9vSRXGQ}ra}RGo)%lv=Oi#PNxNYt)FZVnkXLs}ctoMRs3O^w}}x< z@}*nb`BeHiW!A4%j2Oh^_%Np2o`)6mxU|{@v+{=kQ;bP&B$q;P)lO&9*BZ;|9#fa*p&&kA7zGk+L=FW>`h{VFh1%9cW12NAhWXSLSm?7^5 zLCn1M)OMK~`F&wdGZ#xCSsS3O)@IVSR^4$s9Nw%0f|EiNTGeS3=QdNM5VeQShj5vt zOST?o=*q-!)}G>=9Q=$+)OVeHFU)0AY=6s~58s&k!<`jW(X-=UF zj_;>;_9QzI1vi?LAtzzF?1Z7Q)Yq~mP7(m%*2q-JZygsFQEPQZRrAmIs zKjF(>k^XL#pK#(U(&Mf2gK{l3B0!Nr9|HcaaHy1F(O*Z6_Ir-iX$am<>664 z)~w+=>bRH<#scuWG6X8_(uNHc%YSyO;*l8%%w2)$isX-&c?uJr0S5M4^WfVqc@sJu z8S(gJ5hBckp@gtomyGU%$`lZz8<{HUEDKm;I2oe@#XJh z#&E0PxEbQBY-f{VSk)mGWcG_9v5|KeobWeUu#=4rL@{M?u+m=cv~TXzhVcUjtBRvgCe z64sfeW6FvxF~uF&=n^5RjcC0FH+p~fyWc5+f0oazwx;Ct#-!N=`kCO5#tPcQsCG1^ 
z#WW6`wNO8!>?#T4Xa(EDx)Wm{#Dk2ZN2m~SEkCy;YZ;^`fL}xNtCHPQ{|Ts)-P~9o z_Xy3;!#w@zCVx|Xb6bOaXJ}c$3e)rKZqKt$-|lsnWwqoL1|Lq5sOpT{tR>fith%vB|06J;wA9)K@ngMB55`HE;h`enI6{IlB#2TPAB-0`h23rS>P2<0 zC0r2PEWz#mS_nZTyW={p>q}~b^{Eo-+!wq(!#F=`U2^S$R0`jZI7rxIoEx={+EOWc zf9=N^joz@c&Ec5kdvo<`W>BEJOzt7aJSvC3$NA*&mt5s|1cV9xjrGk7)|kCpm67YY ztF*Dkrn?;4NON74($C7Up4lpMqRsj>iCYo+pU5H4Rzg@;g=r_>RAmYKoAA0uqjoc> zkI--HBa~WiB3DwrRT2f}R+%cbScI}QLK{eAi{QKZguU5E_t zvelt&m20z&D-U%gmob2!{yt@T524C3t4b(0qTnJL<+xne+ z;BFa<$Q_~I@P1J-GbzR_I8HHlc1%WkEBMC=GC=zA@Yk?ESl=wJKW`LaMF7^-RGUVm zdya1`rNzwk^{)MxjY~dPlMa19V}>v}s~Ibeivm`M%y!z1a&E6Qub)*Jm^>>l?`HiJ1~-nAB=j7c>DmR(v93H{%i12w-WWkNVF~~b&K~!C zKSkBL-Ffr%vpAr)zD^E`#7|b(8Q?OznOkd!QTRV|rV3to8oonmEWliB*_ZHF{4^2) z?bE6<1nqx{^&pX|vqnJAWx+mbeatwT%c>Y}KPefI&Ju9%M^DK!2 zORI{_+~o{@@FKf=d&13jZ97W;xarx}rJl*#j`Nfi z{5Wn;aW-NunC-0$B>VRfoENrKOs){}nv|vm>0=rj-7iQ)$c~U1R0EAl7vj>`=zUS* zK8&b2I@s*tYVHm;+@TsiR_1eYB*5wz%z0z_rHrpBJP;#ERc^6-k1S<=zPo4pFHt*&A)mnbC+6%3tDrjHy>kYml z&o=XAwerg4LhzNh`7(#(cD+g`M&>*vtc6ZcKd1W{TxAPw2=(w$Il7c80&(UOw|zvBlcR@;SzOL4sG^6zes1}m`UAB)9S$kPZw{3}7*7RRceuXtN-}}Xe=-a$sYh5b_v`zv}yVrX*}sA{^a+%sr1 zZua8tvZtb_9@z&+)x_y`@X0#|f@OtK<$r@_Wl_hqlI=T8Kvl3?)TP8*H(&O4Q+g#+ zMv{5IM&sEJBYLN~V>V!gGqUD=->0jORL9_n`Bg}VvJ9ZTjuautg#)&e-;7@P{*5dw_{ zVVdSZI%YwF! 
zVY(Mlt(+V6{ad9a=w&1T%kWap3Hd1C^)m3d!?8-~y1E?tAfP-K*zNY6*(NocZ@|JE z8t|I)T}HDDquihQ&IAxzKs~azo>8r zrnK|(Y2);lBC0i3E9~>YSZP?`iuSk_gIm0Exr<<4OU>3VRU&znVK`n?v~>)_A}k3r zcQ=zAF{2(I47dt?61$gf)R~h}_9h7v%@MxJzNj;L*Xs%9Yuh9oC#UidVTgu$(f|^V zol`MVI|!=G8OLCyI$x{IN!QQXkI&3p5q)4?%4#d#-H85xoRULnk9r@ zApFj}L8vb=Z(6#3UVeHYD5h0nJq9$@;IJ@6M!ZCZ&>YeN5o_%)738DAV!MMY4z+F+ z6(L5@m)(PYN+0Gste>z}F+B&ao1W!Z&O$4TyYTB4A4tA^I0@ViZ{0qdDny|84gCYYMkua6k8 z?Yfs-#(2quPfp=e*~D5Mylslz2V>~fg&*d<=xcuw!WPj|8d|l4GyArQg`Mzn`qxYC z#|40TUpAy{L>KSb-L{1Xc^*~Y+%b^ukfigU&h~d_3x{vWE4NZ|IH>{Oy+{{zd6Sr% zA{Bk+oxoJ705=y=W&i_cJL=l?V+I2Ms;}UtWU^4q-j(6xd~7^o+V)`uUj0)QIb21q zFV|y{q>#l~4k2bjQ+!iGV`_d=YGZs;OH*ooeTRN(T0!4ZMEY$8#Fr~9mJ0puCU<>@ zsdvM8{;4WxSktiSd$b{W60U#V}0j)E#=vyr#swo9a6n-8up-eU6LkS3b>MN7Kb<IYlMTw>%d{?!h6`(`%R-5HCU#u zUfJ32eokR}bR!gH^t6Yrgk0b&ckMo0QEo&%Cs{U4MsouGsMaM|-;pw|3cpZWlkU*Q znI#BUlg{(2=Ijn*`Xpi0_BvlTTde8y#H>j(W==9s>G5sONx?NSqOG|xon@oR-I$85 zCM>5~1%F4AqcI*2wf=M_kJF#*>;|N0UWpgUxUBb_74E8sS*+N7%u#Jl&OX9Q8zrf6 z9K4yZSYQt>#DHpByx*LgoG-)sbq;~^?u|Qhs4ad{J_=wKF&?2JbqZ2$nHamq1Bf>qSv=;su5n2q{9YHOW|xC=C`I zFA5gCUBqc2GTG8tWzmk=g`Fi7l*j%sDx8c2-c5G%F&p#)%)I(q*B)bMhY5Ll50y^? 
ztTk^7T(es6rF_csD_*47;KG=qLb=9}NPw@RobArtMb;K|7@;W5)p$jClwgAYZPb_a zbhDia>$MKDydwlV$ORfzFI*r{2V4?s^B4IK9iHm@9!RJw0yul<^|B17PdU%dz8ReL-QTc}*zP$;&OUt~CS zS5h;EN}KzoGikx4v7%0tp}HXw-lL>`YVg$ml-;+m`Z$k;ns~286U!x-(iT5C-vGU< zR|47o-I-mk^^NiQrPlC&vG-q|`UNDH6bws$NA0n$+Ll#~*wk=3LN^dIo;KBHcO>n` za~j2&73oqUt-Mg0d8;e`%#q8F7=P=kJ0?a98Ohdbv)@W;(XS*i(dLjk&O#nXK0$GT zk`v(+VBQa`!W4RazS)+aQ-U9gWDJma!yfT}H9$%e8r>J==dyF1+qcuR`OmZ}P;wd_ z0Zv1-KFxt%QJL%Nl3sI&=hXtvsHbhCik6SPc4gdT&P>gfxjBxtu})NHdX2mLT#Z~V zo%TGv+tm_sidTYOf2I8lb@%%R~2 z&b;e8VDq#E)gzIO zT|ue|(NrYKmo(tNq`E>%+k}pAZgb zo3ouEXv>Rn5MbgMpl!&c;JO(2RtP|&n3n=~=KE(O5kcW_tPo~s_#6Vu3H%ZTH}7zP z<~7%xIU+UbaTwRx;6%{|ib#eDWGIs{Ji?*=|2sv!3c7rOqP|{tfueSAS8#x$-hP3i zDzcDt5jBOx3LP~I4msbrh>|*CQ{K8#8aO~eM?e#WfC}lvjKB&OS|SID=h2N(;%S_i z{H3J;LRiLnTV^i>`=U$1_mH2q8-5DW!eN6ApJsh4+VJ+!PZ=}J9r#{f`yMyD^WdEe zsoy6fhJbr;#$>vKFU73g%5tEdR#ayFe8q)Ej|(rBY}{;n9&a&v`!-DS`O|79V3;Ve zX~^9AX8mmAvVOHL>=RSqY33WUEHtTQ!5bbk5Tlk1@e?hv|9(dLePPi!&R$k68bY^w zS8B_QUO!4ly_GsEPR5|u65;Axqd=Bc(Q5R5TN`Ke&eH58RK#in7e`s{eV$a|$tv%q z5p^I=c;r_^Y(VpnCaJ|qsJ)%-qSb4uBKu=7ox#4#De^Nh9AjZxG6Z0t>><=<$ZBEZ z#o840b)McC_Y~WW&Wi5NxF^($)#fJ{Rx{uy%VH*6fb+n>ac7k#D*(ZSP=8o2? 
zy`&v<8a<+FJD+T?<>H%~;%MQ%))1^JTel5Gayfp0BeKF3cw0h4!tp^ljbelK2l4zZ z^zu`h!Ozmkm7YyQ_`8*$O=SdaT1L6}M*5b{SopGDXI6MIrtLhCwV&n7c_`~J z1kcn@zyKk^sV7MoE`}j6^~_arR-LoNaoVWtFlK^s;9A^q=A#!kAk+op3hVbZt z#9p*JLb0cQZPH$+DpX{0q>HDei^h0mtJ#{xjNT@%9Ksa89gYYzdaU(huvl$SqRJM} zz>okDLZ1RGCCKTi|B%V7~bLA4*Gsd zmbeS9n{0a?O?j@&2WC8=55YEzBZFJijp+_{A zaAmNn%WE`y6HwUMDEv%BZmGtAghRx!eGk z)bLq+rx1izW~o+RYb_*2Al;h6BdUd$K&Pz|5fikz*te1EB1z$kR9M=Y=AoP(gL`_n zUCPP(2dWEK*PR0kHmxi49RQs?^$C=M2!W@;!HZY~|5j68W^D7m-bQ~vsv+~LT?j4D z<4StxnSCfPjWRyy50rT2VCFv^s-sDEgnp&*l*jBq*pr}c5x%_<)&8)A6s6=V=TP3U_r5u%3o0U`Y9EGqN_=A=$w zAH^aPSd==^^f=$>ndO?NX-)Il5+!PPagx!#%O=i~7b);gtLTqvgtpB|jZ4H0o2O|( z>Uc>wTN2(lkg&Fw!`7}pOyiT9yUl1MQC9+&s&<3>O0oQ&kk+o16~dy z2MDJ0x;70LkAA7Qc%Dtlt9{=6%1uZiSxv(|&+hTO5|B@gh_dMrDj|t2;M5lR6 z(L9xbq}j9cxIplR@W&1xqRN;(h(n_Vhtf??P)l|7#0w6k;##B9bW{eMvu86b5Vg=2HbV9Qp!0%e z;7IEqYX=CR^*OJB$)PIQQuW^LX;{olpyEX64xX?jL=h2{idwl^Y=1@Z$ZQpsm&qrj z``-tP65{j>bhe1!V@Ui79sJ*_uskpH8G3Pf$1Es#zFpcxRhm%M z@OET%X0cFywT{as_Im8hxB*1RrAHs^c0aFQt>bbHy;Ld~`=nG^y;!%>?DZWkU3jI~ z(YxIGpA?`BdfMIDD{@oHw6!1RgLWA_tkpwhhc81o9Ud2r2)8gp!~>IcogfpEMWlR| zlxtG0JEXX%mCoN+U$z+b@_+?-6qJ)i#vGUA-?M!gv*Et1?wnxw>Qp0YK|U^YPX!`# z=2_#MYxVW)f$)x!oV9|hT#~$MYCp#T2{?zX%IMGh`F}^jpEWve( zT$!}mTI-7WY}L|1xXRLyz$|tF#{6V=`z*Bw2oL4M6`Yku1VOT5n6uo*w=tR+(^|zz z5S7FMUUa6S3EL(JzdFXg>>NKqIMc@XOu`)7CaXAC zdgkrsEiT_JZv7_XTm}qtN9~$X=AHxL;DT7!nFCiY*)gX-LBE;?TsbN{CU|q#V9et? 
zc9ELMc!?}`$ricT%`unLpR?dJzRcL8iYu?%+{j$fCeby%sMd;J<8L(Pw5Q=A%E@9| zTG88b-c>ylk9)T)ptUrhD_7iH6zW} z51c&n&XpSNUEp--S4yL<504Js(xt+mv0E!qs=fEnOiFQc_lD}UU7|VP9a!iF$SBkz zwT*`(JU+hbsjsC5C~GizzgbRdx2SKEe<8naetiqw`tl~g)mY%^Zvl#pyssHwTjRkL zletI1^gx*QB#ct;3c7Q4%t=TOU#Ajz0qaGn%3XZFAiX;9aj;&sJAqwUAL>zaCgT#T zJAzpLDdM*ZvdLm9>w%dv4RkW6z70}VIr4BQdCcRSm;9om7Cb15ykp0!EYY$P3f<}x z1$aBsc(PH3)Gn9M{&}uR@IVaG+O;13_#6yrY6J1#n?zjG++& zNKP9;a&v@WJ7@9*Lr)Y1=cS?Y^h6P@MvD;RKWaYs;-o)oUB=z+G&MKW{2|3Q2@m8- z|C9j#R+&lM-pV8rTE?2pHvP`#G%`8N~^i{i{ zjtEU(CE}Icsyr0x^^d#%Nbe8dTTny0l)&Ahk~+%Q)Bat`a_+`vn}6l`hApWU>(zoovJfofbh8@rV2T?)aNkRP9U>&ikz z9MpBYIy%Yi4wMWq=w7)DFb`%GLCr?CeC#pze2JQ!HgE020q1<*r{Zngyn>wzB0ZiT zmC&r7R<33Jyr5Kc%JYzqH^@%QS=W5y>`XYWYvZeDmWwXSGM8_eTc2&5jncN!G821) z3t|*;+akp68X*;-&VjE@Ph`9CMyMKEzS-v)M0q2X1i*HNoglxyx7X*2|g} z41t0Shk`_)AVXYh#?;mf7Yg#`MYLg5aL$%F=9}RR73U4!`X$0quNqD8z#JDg1CR;M zxuGxM4lIc2gSO$tD?cW1Q(s`t4ZWcmfDU2gX*@i^ZwMSKlCQ%8%)*$!Eq%cy*)R;e zE&Z;AM*mD!4#K2MOM5U2?`f!&F6({KXcYl)e7DMC(m%MEyup&8#Sg$c^}=GZP%S1Z z=AVGgVgkI^DMGtWc=keTb%Y(@3k|f#uCU;AF!@vSUIC?uHUr;V_;p8DHNYAlCCvSR zHC};L5#~2urS-K3J`uv4qrm!xfR#8p-*r*sy{=#stP3v!tKS>II8}Nd=aviTowij? 
z{Q@_{$QdWJ$Y>d99P4Ij1~-dA&WzHsnk3_m$bpjvF2ivj*V1L|;STb^$Fe}D$d180 z93E;GyG$R~4_EiYQ%W>i)8vfkUTygf>6^^4Qt(%$fr}(*4(oS>vG{_aSIJ2j?lZc{ zd3`iz!eE?F<>)5I2zk%SrT`+}KXDowecFb+)fMyn)HW!_BnTc8Kcs?CK&Vt^Jf|5<{I-&ncjWD4b#9yjs;gQX`B<_uit|Okb)B|`Bx@|Co*^JWDlyv&`y2j zsD2MQ^du6I^vYW}z^g3+4oPlyoA};-#G9|g3r8wqJ0S)8t2G%fg~vx&@ci8Lz3sZI5OtdW8#xZ6=2~iX_wjm> zprWw1MCJZxWJGSz$`=hOwzhYnTK5I6o_|F==CNfzEjr&hY9Thi;4=8ftsEuBStpPx zwDpL8{R6U=Z=EAkt2ahZMRD0>z7`gt#lkS`bElqEqCCp`+OXc^JBid_WUkTfF+S=p z!$X2{<9tArD)nn@SI5=K%e0gd>*O9=5~B{M2#-~%Q{KbsA+9~d$dI*{6!U zmbSfB%>t2oxtu^Jw8G79Z=pCjOqi#e=(#(?AV*fi^3LMzmt|927+*#$|PF zqcjd)EWF&5l_=9m$v;8ykFWi7!evZ%fW8a_ezBSlZB0D|*q3_-s@m1IuHVm>Ia+eimD{ecY(_(|N8_PXi*+;DVu(OWz^iyzgPG~7F3inzm zmRjM-qHQP%t+y7kBl4e!#aJA7s!063SW9OK;Z=E4sC(UJAsx-~cgd8_chNd_xJYdW z^3#?DkYiJJCnx@k+*@5}uF?m`T`;9-nN2Zx_^fZ%7TN*&C-j^R;**&dJIXoyXo=`J5n(yS)a07J+ zA>mXxrWTA59*f&|(9`g9X;O)oD~Q;|A)Uf}`#cZb%@bzqMpv~n913>-!n7vo z!FBNRVpn&G)>qJfQajjGD1K@8H6=}VuXat}v-Xd9c_KDog6jL;=Jmv?6(%5IR%QYo)VmMx!c#iZn!m{>22;}RK~P3aTt-Ih1PyJUm}K>(C(0t z=VThD9+`1bZ8Qa|R8!=sUP_@^e=W?e{lJ;c~9exP1$qy)XK8YK6(Hl|@bx6GQ# zMRGY2)NoxQR=u`?)0KE!f6HVMZryTu^ZqWQIi~f@PMOeJdQ&VmCn)A4 z$QPN8o{`z-(4MR`iIDQ&Ahb=&e~t2AHd&o4uc-? 
zo4d*o$DqST)T-yA-p@SVE^!b#vC{KeGcl#-c-iZ;UE4IrjPIvapLxn>G;~3T$T(5G zIGb_rIi;a{A&z4|*#tjqzY+2#?GdN6TqYSusP%bX9WQJ`&Z*M#eY}C5Z;@);r#v5$ zcUa19>Zk7a2|RcWRko9C(RgXym#ZGr;3$){v_(tk+WcHSv- z$!C?Gw8;2wfR`&j9%Cd6RNV*iKB;>qtb&~{Jdp=v_f1+g!{JEUC(KrX?Qw1+$xcyA z-lmT6b{&fj(_{(TS?MWz>xJdytn}=9i;BJBXroUPD;;>3zmp8#h`jq%*N=VXnbbxs<6c;Dm|+{JV|R$zOX40LnGoM$6{#2BzYL;;2#{Lwa(r6L^h7$`?Qusr0$|R zidsB&x$1cNL8=^hkMQ2=Y_|VV)g#NPis#fP?6hwJ{QXqBhic0Y(u>FowSa-r2;Fs# zo-9`VZC1&@-$5$4^Zd3mJb7Q-&lAKi zXIiW}Bh^vFs<6DA;_q*g3wbRbjPK%I{E=`t^G96Ja-S%(Nsacfv0GGQHtwe1v~tnw zG-ZDVu68@3?wO5u8MLGpqZgt-hDAg`7mNc3vAMoeCJq@+;hVSg!1JfTNM;J-xMqJ^ zrRN-FeR8DIbCTyN%3cd*AKYo~L9mZ(b?0oH*+x-nzT`%zh)X;s-eBth&-14TOI?&= zH+Ic_>XXukjqO&gzfig=>G>GML7lrrmJwB`bKccUnZ!v_>ey8(y-hnp*FX{p+@Q$H z3i;g#=~#0xgO^@&_jtKG$`yG?&jUZFRh}QdCsU99|M0K8gf`{P_yHxqEhTqRf_59AjuI}M39Ck3Ou)WOK>i!rwdqEeBedi^OWQt?n8bna zIfuR#vhJ<>K&G42c74mSl&f1_kgg={V}zJtYr1L6uoR&^JXr)j^DN!-!sX-$Jxae+ z1{w2)CkaxnjR$~N)oF$L1NaCrV%c$918K86n*=@7h z#4%HjGZS{ba1yKwJwyf@I*c2DTk{q`+xV6Z=cJu#@YTeqWj92^ltVT+z3j*d4%|_vS==oO3qDBL+f#j*SzT#4eVn z86bA4i503%buA7^MBT@=(x4A&%U+e`Dtt|@BepN%`H>t52GT`S)a|Tgy)Ao{^#8O< zn4i_1l+^>0{QrKn0Ftr{zYVO$N7sivw2xxXyxVea2>T{;r_q(+1PjX|J7ig5N$F)t zVF^j&_y7750+~3=Se{{7`uj%p($ zaW5E6!Z-(+LMw@uK>3i|p+zcS)v-BX#fKMW)mLta9{8ju5&=gYwW~AgI&4(>zU(!- zCw)~j6&-o)ZKu7{s{2@Qml|4}**gd6S;Hijg3vAeT*@xk($=tw!2ZO4=(im1IQ(fi zB^q$UKMsHVUWX<3I)|qE- zJ=_)gPiB>sPrjsPE$xPXaJJN9d;Ut{$HKn~Jw;^bMrdj0q0U1`CQXJFWLFs5diWot zIsRDZ%gd?hF}uQ9`df!TZH=ao4AaTbn%JQiw;nzbj+HEY{DCx~g;MRC08L6JF!k_B z;l#w-tr{r9u2LfYK}6_qSNQLphe9dj)utAM3>0=Siz*X(g}Jk}JHuIxLu@9e&orj|_4L1Z24j)~DK^Ihj5qtzR^`u{>ThdbG`HQ=AyKr7IU4t*Am-tYuS%Q{O&lI#zr?)P@|A^V{GVsH z(QXTM*1jzB*Rg4!?8FW*Q08HDNag&zs|#k0=}$-M&Ge5!A~c*Z7>u~MNH@bkD}d`^ z1qx1}3K%TtDlEFqI)A&MOLC{bx3aFry$SWT!*{zc;ZE+|E2wz%7&Eu)aJLYO!(VhB zZWSg%-3Q3DJB&S>Dp`r+58ss;-wMakjTzY;sJu8@kH%#{8*S`0Z(jsiHD_mR zD4%pdiVyo2+n!wRvU+}?UvQp%J`%M^YmoOV@eIPIUq 
zv|PKr%LWVnnv|zsIZ$xAD%dzT7TtAIm2TaAp_0zH1polZ0WqL>?C97Te9NTX!Axgn;m?opE!%lyJ&GLLlr|`^%##izMMs8PuYpPQROks6@R3iQ?dJiuy2$#G*z(9fwbr9xZW;(}?%G zC;^32sDBp}sC!e6KaEW=jHX;+j&3?^9olc}Aq?$&xAQPq8<)3})$th3mYB}NWCk(y zAy8jMyAOwU6PJTG>YJp5+zTZn2+oK82eF;+zIRxaO3wR|qD6}7uFkl5l%CzwnVuh# zQ$~(Ix$_X$OgOOJbhN~)@_0;y{UXsXsav~UW_fr~PJtvYm&A)Y{azb}|C8Z5e6pP6 z<>u~aPmHrM10B&XX8{>GP#D%D@nzta$mO(yvOCkmd{X+U&OH+$+|=3#ss=c)g-{Vg)Td~5gItoe

1gF~lM<+I*V=^RIaHzAQtEki2Sz&d?qjRDR6nYxuly03~AXjl$kzScn zBh!RSGf|qL@`b$Rq3TO2k@Vq5$(W%sQdP93kl3LQ6da01g~T`o?_(tWK|5UDL$}-E z(%A53BQ#oDIA5DNUzM?xvL1_jqbeNqclnMb4p2;mw)BO?qlb;aDDVcxjCoR zvW$k#$lzNcxxBsr7MIj|oS3?nbm5?$P4@ioF;YY_gZKS$mHhiI{n?9K8e3?@~0?DvDnby1QeOt%z|TWhds}<7b`2KKeecm(_tkWbfGzViS=)#wi@cj zNo@Q!j+Xo2t4Up~=2LZ&wXg!L+vN?fE-khy2TBUBkR(Ur7T?HK%n5HNTwVewusW&Q z9iC}@zeJD-0_848vnfiwQYj%*l>n^@U)9r^q5lICR{Z4xyw)UD=NK_LVvHZdWWvvJ za+TvzIx`FXbL3CBHGXsL*Q>i0XTNpkQ$)!xZ_d-Nu+nZGxN5fVkEb$FY@fJ3`YW*Y z{0H)jp9Z*sKf@h&PrkB~Wb}8f``*Ca@-0T22l*+)DY?4z#i@(>Z*edD1ggh!-z>KZ z`JM89!5sB6N;WHw``*`3Ds^MP^Om=z(q!NJ`nO9j6_JxG9eqJgZYHDzqgd9{Xncr! zCfkw=+S={54>S9=Jmd$n#3OgI-2XR_w3X~BqJ!A3#oK8xji_V5NN&|L1UgsqLDNM> zd$<+pxTvbUfBeD>IKtqJg+_)t1r~)GhMG7Se@C$#MG=Gy6=e_*f#6>M^oR@% zw2ap&cc0p=C$}fJd>-4}ecD|&?avm7A)ukCYv7+kbM4}oQThNTGT-;-+&jadwY#t1 zpYJ}u-*-n3=iYPApU*kxbN+qK=g(Yfjp2Zgc(Z>EA5t|99`gcYq`#!NBgn!j{$S5X zb2*QKOBi+qVu#ul(7~?22sPOiXiSyvz^yi^i;|h#efTNVG+XaPAVk!-DRf6D1j_e< z#C#30%?mNv7x=rSYLtn4FuO)t?J50-RN=q2P2s~%p^_LwGSzB?6y-@q!dFm5(CDdA zsB|YjG7&tv-qO_d3IZ%^5J4y&zou>mY9?w~&2;Z3`>1W5Fg#70D-w=HTa(}srgyzC z(s*MX8e0RocVBx5_#rFh7ZrFEtI#v9%60chZ$OFzY~ZZ5j<^hLIHXKpw;!^FRe#Hy!#^bSNTl3DgjT9AhXWEk<^-fQB>|k5HK@l5jhu zr6E<$W(B00Y`n2PLVA!&zRy$g6LQI#yZ(yFT>;r0IRmyL;+2yaqXQ$oUG>0l>v$kXJxQNpPW_RCYfK_YmAzIK54Z7k0!{ z^g-SjM;hliMP$r5N|7*gYVkldrilVa9{u~s7-qEI1J#c+)-x@|l}0`a_&5=h-*}Mu zXwyVQ&slV6(bdEQ6`;|8wgJwE@j$y6=lyt~&5?8LERyHrb+b4KPVyG$FR0PU38+w% zS91{N39WdBBB`KSs6^^*3p0m6)CvpvHHri4Wg%7OYj{#unXzTV4OB(DFLbt-1nHONI0+`COvAi`=!WF z*vy6nZVaZSF{C;|f|X%JpN_2*%ZzV;faY}R)DrjmA9_qK0MLkx-=pDJW7dDn#pAjj z`XtYUK_$oV3aFDFjzhgkA65?(PsT~rhvOfd#l^$B4{yHm8$5c0@f*@r26nHpjivz~ z=Up`9$s8%egW(ACb*eDE^qRn*K8)iR0DyRVwj|8P%#RZv5T+j4aPhio5%^7yN&^|| zh5Nv&1!cg~IKjJ|9t*+V^A!z#B)>B8aPxuziBjy3?xymSF%lN?< z0CWqa)Ae=dhFQ;72DHq`z6t$E_o81k{5?OCNNv>^#0nYF#UoTjoDl#vp}Wlf<3TY6 z=?kH9Rp_qzipBx>O0=#~G@vkObTw}yp&P@8W8V1DGBFTgvJCf$aMs;9g82 zwgdXW{op@Bi(u(Tpca8{tA9`k4bn06{SZIVbg6$oLl-K#`Yn7wqw=eP?7EuWNM2PT 
zee=P`oRChv*!3SkHP-gJ=U$}+lG>kk4b=WWp(`o{hF@Z4VKDF;ioIxBhx^%D}}!+MOSkLWgRq0l#6>39)WkNnKA{VR?yh%NS?*o z~H5EQQYUY?b(7h_jhC?^fJ{k&Lp91-b ziU9>)4@-ZE9h%JlN0YsTWVDSf)d=AZ)8*8(Mbe9l8JbWTx@v*Go3AnDM+JDsJsT#8 zJl(qiy1mW1cfU=9*z{PnJzc|RS!Z$(FY0j?p{~$;Ywj=fU0CS*_@X5cjH0ZihelGy z;Y9L>j*53-lM{zhh~yJhguUaB`DPx(8S_;2E|W&L7dwV-XQ*y(n?vi{lQ0&RwGqWm z%C>cTzjdl~d%@@OZXkoCEvYBjcdVLEw;BnIW@r8oFdR>~7J2~LfvIySAfo|@ka8Ok zy1meH(Vg^TAT$#QEqSm9KQRybH)GLnU8LMpvKVP4q#nDyr<~#U)Y&?HQ`i@%Nnvg4 z)D4N~7SN^bwU(Tsic9N0(11fOThBxjW>KdAt7w~@hs_-nTEsxg=YGbx#`LAk31

Sd(D)(5StfuaJrx8i$}X990b8h#?xWF%AoMmP5)&lsx~mO-DCbM_Vvsx_$}vCRiA zZ25M}QEF?%!)M*QM`yDP_;Hk)gxW@J8I8DN*hLfa3^m46;|0vv&nM!=32mPoRj73;ZzJ*-D-wCYcM?n88wvTk! zSLG4Gf~aT8i05gW!>AC#csUBqtRwR5nxSYO?`$FNc&2D`Wj8xnX7LqBDb~_v1=RA* z6JpHzO@`=Wm=bg^?FX4dvoHfZ!bCBU6+R#~;DE9zmZZ!uW0? zpRO!COUtJd3X)a|Z(xXIg_(oaXey}=+d0@r?uKIJA*JbRD3bI2TgKrjWgOz%uCA%e zShDF!*`{XsA}|DB9KWV##t;%0I7?utak@=5#YdJDgQ4;p3P1EIye37sGp+ENQnB{5W zw@Uf-gBsLf|F#kKZ+-Y_BrX=EfFX>HJjCng@|w9n0)S2HkLGcXlEw}KFaeJ^!c+M$ zNly1+818pTBV@RwtH31>Nl>Al-KR9onA`X3s(J!T2$x!s4)m=(@Vv=ue+IP&D*%c^ zZ8@kC)#h7i&e^_+Rv3+V5OoxL4xY0PM#B+~mBW>(%Y zTbnA?zl2~AM+G7sx@K$K<2nj>LX^a@EOXmeBG04iB=CfW*3xoQ^TM-&Ys6xM7QlDOkT;A0a0!%A^ywtD z0t*WrY=-w=#3w4)4-_M(3JpPmHBtmJu~nRbu#aOIw63r^Q>Oeqc%h2e8fQ44g{?19 z=tKb;g1&(zxY&qjAn}NekuqSf6wb)%y)GW9Kj){a2T1`|w_L);IbHW-ToulM1;AKU@EmANb^D zTsUGoH-1gCyTcljHEN$F67lv>6+uk7-RA7ipFuC8u}#HP1vc{;AgLm42kMbB?ip(>7W>abU4+J5}^EUT;;e%y-cV8ZQHu)jnp^OGzf+#(I8ZOc0CrbgI;V@2+X|CU#5jQYKe% zh117p7;o^42!3b>HeX&?2-0aMmL=H#KnMBvm?W5IXEIKSGJI~b;Y2rh-RiR8Rc35% z4V*KXJ*YXSaR*h>XzJsqq)I6=$o&1Tqs9j8Tlld%sMhoq=3X9BzT%+L?>dUlu4&Y2 z(j7{;-m|G>5lG*!e4~Zwh~`5W=j1hH=n{nNd!!Xn%2`ytgLqbk5+w9lA}oz!3#o?s z&JOYDj4`}Aae;u|bG>rq{mtHF z7dF@{_`Izzy^i<1tqwZ*=WWd{&D&}!%G+wm&)d2-cl!%9Y@htc-6+*jLx1fxG^MZ` zMIX-o$8C5K5vj^sGaEDoD*?NA~4yelVX$c5Ogi*s_PLUuh*pI)ck`n*`d1XcMxDShw5G+ z#tB*e$2%cYNM$^<*(5{%EkO@a{NW1pqor8{{WV+@6(00wvRI8w7HfaQtrZZvF)(`& zF#!h-QpD^A&fSIg0iLupL7kT7^mhv)^<|)84e}Nivu=j;%0Zwu(n{O#fWt>tR~G9T zjXWEBNoQpn278Jw3tzmSx{|I^FS8Tsy+~&?8Fb80M;a8#1v5o*C{B8Zq8s&trGH-I9j+P$4f+_Z6>Nq<@LT&f{+ggPg z9!JDIXccIB-uy021Sq7BPS1mlgz1+`BtpgnG!E2(focPLa-^#CvgDe8;&6@J4XArR zzo#b*$)(~(G?jCVyD5P5x}w$K8+$e73FSZ2qFNSM{lq|Kb8p_x(0U*=Hky#p{{j(k5CKYG<#8+r5_n+) ziJ(THP~%lB_AtNFGl#nVpu=+6)Af6?IaBn-18ycd_|r$I>%&>s0|$rp`xtf{f_4XMU!1iuq4T$C$rDYGHn^w4eF;()-LWl77$pQt8*s=Nb9bwQhs- zZ|r@O^fUMb0@H0PRa5A@v^Gl{*<&p|7PCiMQ>9hxkyctMlReT>Et%OPEzeRSd!%() zdXzoBPLC7WEM#4{aasxs$&j9)5XMfikd7%JH0xmq3U<7^{tY#S$>Swug5zHx zDZH+j^oN07N6Hot|vJ z1=>vg>X?fDvi;6D-MjHH)iu}UoQ-klf%bkG 
zT5mENK+Y#U$H!;f%|Ee(tg}RH(d*v*z9;!n+>+ddk)na}f|&~;Na9HK!5J(F)LQk- zk-Q%nPC5Y>4E4`F$x(>ymw-&@kFa<77M~v6p@r4|mC8M!RPOfiM(njvP~YtO?za#? zhce4=e72ahH}Q7RXT~rxw(ay45B4utmcS5d7@UG7X=@}JCR@XdVjDj%UksJQw)cqa z=(8CXm`KNOniTpx^^AmObq0x5P57vLSDk6=^ufr|iB*T?fDp#GtXNqzbu;544PCF} zBkojf`{X>QK=IS<)*l}{t&j!W)*t(8vv1-O70SFvNoDuCiJfE!vcJ)E(_;uy?MUw{ zR4sy)m;Qwru2KfDG`5E@Ae_z;pyA*~7y>f56YFyZ;u7pS^nqIX0_%qugJevTv!tVm zxX~s4C$%HxV=x-8A8D&IxUKtqf)OBFud-c^7~J{EN>zZtP&4N+pOI!gM2E&RsW1+W zA8d!W%gR&k##Irv?I1G1R9!7DFl+}RpgKh*L=t=>XJkt>t6ZQ)gDETL!`KH3p+iE| zPKoJSxkNfxgjVlwxWILr6FxfDv(5P{Q2ye~EZ2RyXWzs|3sTwmTQ=UNXl@<7Bm>&;Z5MUMfo01D&9dd$UB7=F zSI`GRW8j2QP|^xjt+wl$vWKqjbcMh4=I06AHu2#5^wrj-F4KaUOV#8Gui1ANdWymw zd+0m-8RR!1>>6AMUc%b8_}31D)#=(fK3it{BH!9MzP3h{?I6qjuqSt{Z=Sd;mXEE` z*LKjCVE7MDtZi}}tak+AK_B{B>-ma2UPYO;a8|etaHVkha4X<4;l$+UFW8e`xBy>* zFTuYI|1$h;_}%cY!M_H-4}KqfsNZ{`-=QW+0Z!Ge>QkCSbWx>0glmF3LwTFfj?E3* zsK(yUa_yB6TF1Rx1kK@&>)@wA=~+y_(h1>%oe*^M>dtFzHQIOOH- zlk0MyL{gt5;}c>~Sf178DQ4PQO9Z{#*gMShuZ`EZ*kZ+G9!R-pyF`hvZq zwqE^!x*?7nHl*(`$Ur_~^Oel7yHN|J`J+OIkP;sSS3PZz8oO$MFj(c*orM_!x>w_cYw1#HbwAr!BXoe$7T?=zx^Xb&r)HhRJuWs=-Kv! zadIrgK2o{2}%Zqe*R8ynQI{W z9E)4Ph>uALgEO4Y>B1ex<4}8vy%sjQ_Dr=s#5u33-Urb@7uW5*F1FNnjjHbq3~UO7 zQel&({(N9fD=8_}<33O$rmDpb)v?WI7-qy``B~>n#5>j!MQj?+5<8NTkZz!o(kYFre4INU@zA$6C0vGT`IkM7p)9z&Rvp` z4pl&3W2FPvZ^fuNCeobH+unQ1*3LolEEvLUv^?H2yDC>1Yxxel=M~`1>5cfwPvz{A& zT!M60^fBrYP*KrKK$BiqSW|C(XMcBp20w*uqI!%2d|(C`%yHtt9nLW6yAT>89-Do^ zO5+|}?i`C#Oid5ELCX)9BrpWp{UI&<1s<}paG4CvecJpcx*Gd<1v&($J19D-S8ns1 zVTy7$rrM1Um^Bs*hP(<4`WRykqD8tz$y+_LkZ}q3)M>au0AY`4EZZ~1CSZtxOa0~c zCnMYV@e6+$U8nmMea)6&_615onfxJw(vhZbX|JgtLqyGwkU+2}ayyyNp^*(-m0m^A z8}Yb}b1MZ`(KdJO*1^p>? zo4q;eZSzJ?*_t#%P1`_QR4i=)+HB`5p-$Y`!ru|u0u|2Sqy{?UDq92H;!wSAQo$h2 ze$<5+*ZOc|xcX~+b58F40tV*o72Om}4Dl3<(+pDSRZWpc7Zq{~rOhlaI!AkhGZF8) zy(s}{Vu!5610tegia&JYi7CFiyN|` zgX#UfpW#H~caZd#xOimtg+SN$yD`YT

Y_2wR9B>LM}dLv%`l8y~$|{2$Wbg}%|JGrOKd6^&!xA*^BG>9+m`b#`M-mAMmd zkk62@N!2k1R(8H)T&7f5K|Bua} zf33s*|1pD}K&SkVoI#riYk~;8|JgIB4CfiZ={#;WY6Ae7hbk!@3Cj^vS+_(&wkyWz1^Pujy?ND zNORBD4kgI1HnNi-cBFSh+y)zAd;0DzzOe>g8#|oW#0K!{*dRfb=yd)eYKw28?Ruc@ zCl6!)1p-W64`?n;)~+em>zeic*vdWbJS&>R#o=x%$53hNSLm1JJP2hoHYt)Ho00+> zM-qf|L@e22tMj)|&2Z4Pm9yf={VSVgD~EUIEABk)!mf31-I}4gQ0@FpY$P8MJBbgU z*}%USo1}X_7TLv`HtS7I|GXN&HI2>6Bm&Z@@?1G_e2U3KTGdk*w^(5i>PsjJ4- zjcX<@)br?L`tGh^BnJS_F|jd~V{8?B?k*2-nf5%?P&*8C3zz8~w8hMZiu1SSla2HM z&kpcvx^N(aW#m1SkzIIGxF0MZW;rF_;|xL0e`Yxcx^s4at+?R);Gu?CXkC9(_Y<(_ zxeC=?uYtPkcznQa-tDf*`eB8`yMBUTrFji#UI>MvdC{e=LQU7R_+~S|>?$0V)fJED z!eLz{@a)3tP8EZnzdmtFtY+Q5$8kTQbmY3XXEr$hWUDZ6{2V!7$Kx&(%I`<+zsJF= zVi$-_+w6K>^_!rU7KUWTnD(rLCB9z2O{xJ??*g=q-L!{lt$Xe*#BgR624p>MyB?sc z`4E>W&W}($Bn$3F?G0<119TsC!#>H;s_UsBe^(BdoApbl-+K#*9B$t342+G@y|5G& zl0SXhwE)j9^B$D50Q%l|2xA_=`FfAT)R`*Tr_fcxc32RGAu#jH6~4XgD$_eL=GO(4 z$`le~ep66InnGt$S-Ka#1{%+L$BFjadL54+MI8WN)Rh&mA8FdHtKNg4ilE;1kG6e6 ztO~pFh4itg^X%pWF=j;gS40p^yXz}g>WEUG>ulUsI=5HHz(dRYp3g zZ7T~FuR1FB;Y{Z;zb^vT?0pE;?M(|1&8^XOc51Q{(8>%(d~rXcKt#mq5)WRJxfx-y8`LNyG zgz9#^hIsdyUv>ykM6wG)akvY6Sxn!hYOXnEPv5>d-7zodfUN;t&995;Z|e4f z)Hd8^sfB`w7MT5Vga#N-;ZV{+hmu<42Tt2s&c3W4eDyAWo9Rto+uGNCUFPk!itSwG z2V(ZN*|S}ymtE#vgmIP6R~_`NqYAk;8+%P>klZRv5`}ire4@>AqP}aqcv969_$^GV z*Iyd1Y7T6X32umQ7uxOSGnFlPtTcNFVAK^;R9_@CphbRoNrH%8F(MMgs}Q> zLbR>oq)T|nW;uyEo^lEI*es{eQx%_sm1KC{LDT1)b&hjHp3C&PQ{yt7qUq!$I?`1M z%qR~9vGF9~7;IkK&5eL55Spo4^6V9@&M|q+7_6m<11A(`_2aBiYic^y9%*v;u` z)?$E(ro+0uNG>#r=`GMvO@iAmg0{dp$3^$-BO(RKs9U3(5e!V_oH{&Ee?&JE>5C-n zssl}1^X#zcI43|fy=Y(WYhU6|ltQMO|EDS<5;k>%w*74aR|dBe?h@R6-$7dn+)v>) z!QFuChWmFob^_22u3v@z#O#}|))PCPrqT30_=-cpoxC>;)JexIjSg5dmUVuWK1=c z_>si1BwN8RgL-)%AyZaV4=ZAzcRj&^OrmfGdd?NK0mM(#F7f}47Vgf1q4~{As|Faq zRGK4sy229outduS_@xf=GZdxqlSuvi86+h(D2)r01n5Zt0k5kKO`a-QK~96*(hwF% zQtVN2A*SL2-|kRPC-%1s(mXJ`Vot-AEcnR1vIDS{V)kKBu*XtCp>%0`0S^$(C4m@x zUSZGfbS{mlI3bRu5LjKr&l8Km!^c8Rg%mxNfkHg#d?cbkwnoY7ILwGZlA}|qzYjEM zw5T%>T<;=r`S9U|4k%Nn+j*U;{+{N*`mC;B 
zPvSUF=rDKaiwGqG^y#8-J)9bJUyZ6xXL}B<1sWzd;w}2at3JQpP%njA_jO|X#0m6@ zn4s1Iz4_I%n75*16DY@^Q3u6g8JggNr8-cq4N$>mAZjv;i2qNefJ{^{hS5J!Jo4b zq#ho(n?kGC#}~fXyhA`dYO`%(z=s9jxhF)mETvKoTt+_P0qZt&ql;rQQa5o$X1a#Zo zakWFkNuQ})ah7|_AdH3drG>$GXb6l94(SQ^XKlye=Z-_S<`V9X=7CX8u4UmKSXu~1 z5))@74#LR7J6X6V?x8^#8|lj-2IJu&Ffur#|8#%Wb_{;*ILu)5%EjuA=7CX8p3lMu z%Ow~WyW-c(JPlTH{7`} zcw)olYDfoIxCh3=1j{?_xPvgV@Rx_i{d5q<&Gh9{24m|G7#SSWa`$I#$KdCVRtBS2 zE;HQGJnbkaH?VLIjGq#WT6f$*7+LtW?+Fr5+}1%DPtliF2IG+-Ffur#58R)%9fO}c zjxZR#a(U4m%>$#Hd=m>FESF%+cgG!sv6aG8J#jHTA%Mz|j0j|4AxN1KB9x$1OnRhF z(n^Xht(AQA7o8Kh8JM_(I`Bc-i*0D;el;d!Tyi2i1O`jLLg49%;WRY0^syaGw&YV@ zTIA(cnmL#kG+k>X(w~&MU*pLOfx!~p+z;lZ;m)wUG+e3&jxQ+f!?(bJz+h?2khCb@ zljby~xq*qy8+HT+OBZP-^n(3w)W7AHJXrnzcuOAboBpo)Z)SPjQvZS>d7=I+uiNUM zcnch?{*#8Jy{-N6JT>zLby)ZYtDg8LYnWHzKypn@p4awXhBVA?|T@91eH3-=cN z(bd?n!K#-6ZJj#nzJ6N#b=9S`=R!1~0W(a1!%_89a@b@RrmHsN705DKP&r!x;Su=M#-0Yk?rf3A%FLm0Piol+V8L-!6=NN7=vHLLy_&aFNfI< zsUr`G?NwhAi6ih3EQ*C7F1S6yL$Ln!p~4Yo+nv9XEx`yH`i8L3j!ofHk>6Cdb3x=8 z9Yx_YJRl5~LS@9+4jF6>KH{OF0I;KDV7vV(xkTt+28>uM835hMt}@l|J=Bi8G}v3ZE+1f*%28p8*U~3h$FGIWnD3mfpMu0SyHat{JNAmyJI4tCA0p&H}M)2cn*A0Cpj-5K$c-h?M+BdErMWYS!WlXh?}| zxVtr}GSpoLwgsH^5%!GhLu@TRXM%zOAeE#%#fCc#p2SL3SmLwoo*2jrtDqZ@4s5#s z&;K!a+dwDC|B-n6-+u#d|NDO{-qMkq<|q&9o^oT%KN@uxx&cv8S9%^Bum&j}>s&*} z)rOAm*yY`Y?yAexsDga*K47m7w^D^nP$)vCq`{ZzpR$DPCGwFee0T~dz5M7DemqEq z!%_skQ12Xl&>xcy2gNXVI0FhtI{iV$^)1vaUFg^x0{;9pewYFdXmR?VgTuS6PtT9E z^^FGOe;<@a+j7HG2b`n8B$Rp<#;nhM3B{=Kwq_N|OI_la54{mMf`^BaZO8Cb$pu=6 z?LbIA@mrA=5os8)(mn=7t$eU;R{nlEF9v2vr09y&#A+aN(+a|Ombf*1DhyNN1Cvk5yKa{7a-yp>k4qZ3<(W z0e_D#>Z#7mZrDdMn%NC9Nw4OZ*d&aFgd%aF3I2@pVFV-YbZ+yrBAe`njw?`6O}dto z#rkKo@;8_hi_dPL=Ltlt^P`De=6r)%6g}V&%_E>7){ksp)px3^&grZ!S73df-82A| zu<$Js z(|riVLqv{mi>_lp0OE8R;%A0MAL}BEcOWkInnobiDtBG|V$2#o9aVducKTGm1c9*^ z0TO3$n?>7e(jcLghupf>r_G{yyvH;`&tJ@|vW)n~1zB$Yyejhu&dM5{DfqsLJqK{d zIbk7GPP@B8LZFyFqW%&TuoD9yB2-h<;wTJYtYG{ZC12Tl4HzWEpe;r1i=cl+Q1u~q2u1oSiv-3gITF+> z=$~gn8g~$M8S5e8R7XRGV%48h_qIi+Xl&>7Q&PyjqDiA_RzfuHkhU%*MC}f#?@}p2 
zzJo!2EGX;{LZllQP}L12mZq`I)#`Vt-H)1G8uz1`bSTOZe4%8~A7u~E#7KUR1A>Ir z^V67|Cn?55kMLT^hCE31(x{yG!WRMafUViru5sR>yxt5v8Jjm^+cxuCU?i-DhR2V2?MJRv2 zZH3_+w2F72FEt&Rq@87W<_i3^typocgE*5rGE<#X#S{ak3IZ?%DMQXu?6Y36#grB9 z?<>wysMZu<3(*I>DVs;m4)$fhj^n)d*@=r$O|e;KFGyh*}lgfO>kzhBt^q z+0HBXjuw6DC0|@8L^klF*!(%>-qFs{qJP-_Aeus)!{tY`uH7HP+WfGNgw&4lPQSAW zsb|My`eI{|;2$EbNa8~QY_1Pw5p0*K;Yc8r>268dr6E?>;A-f+(WOb>Blj%Z#on%; zVuk_ojD}7?HLoXQzVeqFUF-3s8W(0HgNCoLHcgUyg&OH2#K0)aa--2=8I2y5(P#|N zXk`aDt+Z$EhUR;}R7}V{2mJnJo-@T0O79cpPtHfYL!TL^gpL~$s*#o7L&-eTtKZRi z_HNt70j8aL7sT8@l_A89RRf-)dw}-7;?<5T;N{ew8PBFzok+74^t?kVSDwo!qKm+= z237Ci3}y#cZPoQw1Xct=o49!OOpyK&$muSoX}VjU#?<0*Fi9SRAW097C$=U%=G~rs zG|d^CD(;J{ug>n~{bBGKg}1Hku7+GPG?;dyTx;9fYbin}_!gn>yBpwVcaz5Q@sP*4 zTGO)}+79$mKDwT)THl17`yLa}aU>euzz0H=h?#x5_)TQZjqKm!7vE9)`ZU(9by<69Z&`;v3N2YOGrRyI|V}f+mpR zBb8|?HeuQ>8^AiL{iGDVW;AR0T(I$t-E;*d&AlLTTsL_{#?WJ?>^A3$B# zHPK`ML#Aj7mzQQC0%}~qq$-i#7j}6^^aXhn0QuaRSl_a+XhGIbRDZ&wJSUmM#+nWh ztf@#s2t5js&YD2F(8@eU%JLoo$=|akt?hCMb_)8AE3zVWEf#SkB1$)sfgd`%L5lf$ z0QS6DuEDaZc&VKb!KBT~l7|WHe5?p^?|hQOH$ep4VG6;Wqel7zI#&jH0(L3!17Ot- zfg?kxP`hV96xx-9q)IPbr-&Gl8mX48S8DfOBoZ|G2ax4iQ^+j<>lwh%+Jj37f|snn zhT#!fRb-G2QX~U51R6}{g>S;X-1CM+E+0C0p;7X?Rdkg(&3~J*g6$z@+IPQ{xg~%+^~Mc zL(FFYm*e@A7{5~Fz{dk0biW06>Eyq~`8I!{i_dVb1 zY8%nql^Q7Keh!CJ?gLH_k_F1<=STW!4kqE222nFna;Xv&OaG%R6e~4#g?y8 zr1UAn?eDl;?l#d?Cu5XQs21`TqcB@_iIK;|=QE^V>_PRR$3oCcFR|RQ zi6;eM%NmwoQy+{P^B7jLLO<9MCP3vviA^0Nibf66Wzk*YJwLA3?;sS4VNifgStRCa zFj=oTHiOwCamb3bM1O}ClA8(Y3FtTuDigOPg!|TkF#@_c<2kOa4>xbARX3iZT-SnX)X4=iW)JdZRh#B2!nzPM`uEfSdbp0 z%?|lbJi{;`Yvm=-NCz>-*(ArPf*d3CiaIuaoM>~=YrO%jsK$Q#)bhxB)=1bf$U@NF z(lg%mC+pml?vji37KhyjM5^qvqXB8jg=V2hsqU6k=SDMNZ%gG& z2ZKKJ{0l+2)YM3HEQJ8{)C}xhAX`qV15+jQCkVyfuv_;o47E^*5g{1AbD#)9ZS3Zc zMRr4Byqvcz5ll>}Bz_Ks#)YbU02ZIW`vs_|V)~5VASHqgZasw=;xtmS&6A2p9{P_@ z?%?s6#-%3&D%t`VMrx(t04iCLEe-~Y7L@YP`X}h2b4d?rDFO0Ij5hH)v zX@pT&4Cz8RL)sC_TBU0q;@K*KaX!+Dje>5A4l-DCAe1%!ap9^H@>$BO=5gN71&)_1 z1cItIB;I&jSJQ;HnlYZoi8a_qnC95vG7i|GG+-R*m=!U3zl%HV4DeDh4PMYV+8-nI 
zm!B68MwKz%HuyVm6+X@XGx z9j*iJ)Qv{d={T^`p=-f4^&;=znB+Ujsm5PJ9*7OXHL!pSld<`H2qHB1=iO_>sUGTJ z2AaZy)P~w5-^L`BPh*lgsxiqAob&Nr2XSVW`HX5_LzesCtj6xVgDUX1s9|be*Gp?* zyf7?ZyA4~jg#ehpvuXfguuy^`jQMH`4>EQOQFxu0-;d$tNKzM$PL(Fy$;L=@4`!Gk zS%dV&k*tRRg`e}G{A+`8uitT7-1j|kp((O!9Nh}+Z4o;Pn)PN-J#eMoSTBV*)JbdV z%#Lb}_UjKX*Y|-=)uj8l!)}HSv-gP4tM|06YlX2r&z^qCE?l%%oX1|h({6rQSN&V; zrYnNpBd#wN34xF~ss06mZKjtOi-&CIhSlFt)igu>9m3W$YgOi#^QvON=w;qscBlBQ zqvF8MonT4>uUDhpv|Vh{?cEq4reDIw`N&7qxzR0QC&LEprhQmNWf3h|qA1UJ(xJYK zH0;_`$T<)Cwq<^=S;XVjjjqj`1r{$%AlSQLyA6oDdJr1jgOtM9_+ID=$3!dK2?g20 z2q?$~w~<}A)OCzf8V7`MF@1Y{Z=!}px+|uP*t{MAo0lL#!#6hLU2h?x0}=zmoA!zi z51Q&gLjXhQo1##sI#vTU;O}hCbSxg#X;0sUoNW6w;1mTHm?-Sl?VS#OFR`*;_g^P% zmTjD4>RrZOFnfJR%wF4&D+c{GPOSLQzMk=W4YQecf!}L4_`Tk-RlLJh?wd2mWqQ+P zuCrCtaVWg9*3Y*NP}kHEyH_7FAa<|xZYWp=F1J~_iOb6s_&r8W{gv^mV}U3AXiJRv zcEF$BN9uWu;B?)r=4+b#v3 zbeTV7d36g%Y!&-B#`D!pJYU_6=PSJ%Bb9i*zIXF{eaCpdzC%@e7|&M^@qFP(!+5?B zE1vOueNQ}JJ;d|%9W2&V62DjacR<5V2(H=F&jX~udVBgspyMTHXdX#hkd&V+aeVx=rmua_xP?bNGxd)@`@{E0( zW8DZfnGh=53!zR2o*G0bg;mf9C72AOz$V$}VIVa2fr0Q~Z#xF7s?Uv27!`d=elk8e zDL*$prLcUu!9Zx*^*4}dimd|36hVk&H+?6Jk?{z`1UD+l?1SE@MC^lQy~nK~ibWjA3x?F+naHv!WsE;&0Hrg>u-(#To$m_mHSbZ~X<$+HLW zkLW&b_Av*;z+vSBdA9oq#b4yCjfpCs!PO+uw=oebNunRu^*bDU3rA&vadxxH4$Xvu zRDVTZsHwU#5C=2^jxPn%m&U%RkHAn=&=kcn#!1^b^2M5#>PSx~OJktAl z)kTbP*sVqcCCyr?zLUl+(sUBbrW9B|A2SuD>Z~ND!1$Uv$CGM%Hm1xyy669hb$qsa+2WyssRp4g&#B6eEE2+>^UOtu-jTk7#542d%2U+(UYEyk1eWDuU(T`mqP)67FH0XJs_?`wL zbT+Ab8i)egr0HoO0%()IrvbC7t;x`fO&$a}@#rfadY@5&TQWYvfr#V7WHlZ9Xk#lM zXlxPg3}PfvEu)1x{TK;UJVa=y0>K%PahwwT0~?*bf%_?2HMfjMEF8PBd+*t==V1pD zUD~rBgv=!$up)c|8+!IbYYxk+XTL8!fzaBs-$zN=yPv>g#*cX4z=LATM-0SPe!TGr z06PT0S_!ZfAhiOpczDRdKqh7Y*7C46GBD5iEL+NtW>3C+04{-aL39Lcur>IH}%k!`D;{9Y?p#?7$>?AMDlURt*46UQ4#(B^@@ZY zs4nR(D0zJH-*Q4(d^4DvyABqM-6@@Rl>gP~zh%l=(Dse#fTd+OgY#b8Z zQ(d=FRj6N<_ZCN3+N!|=p=~SKm!kG>^~D2qK&uZPe9%0t)be7}p9llckPiVA16n&2 zems1w@l#eHE)bn8tHYuJDom7XFn513eS#0zJiwzV)HBkiTD}ELR z#)RHuVJ8DW!~Sg;4t%7CQMwO{dMcWIqSSI3r(LQGaVtbbACv3yB(nlia}Y4MxU19R 
zu1<@)IxRj-UsFw^8<#-S`jo#LmMZ4Mk_|_J=im*%FsDNu}WwH zKeuaenP|_1GLAbOQ%4%VwWoI@Gqx9Ikk~3Nqvm|5V*xE4M*zxo_tF9FA-p;XXIuqMO*`2(7b zYH<3G5~IWoYJ8{dvDm8WL95!MumC|;0u`n@khDb2A?k<}4Sy8k2NKkhKrlt&_up}x zRt@6RR!260`gb&}NjxwC#*J~SQ5GQ9XT1oMLhA5_Ni;3jaLz8KKLkhKB^hZ@)ezXs zn?e5o-DhvYAD(>IKstEt54f;p82Dh^GcTHO}mW zPC~FHwM942?Bs_-)3VD|=G)sL)-NS3M68s-KNuhypralOG)ULNaJ;K~&q32iTJJf< zOkplKt=lPH?>W`+rTFBUFf}R>nDNiwKU?6RE%474_-70JvjzUy0{?7*{~axm|Ee!% zg?kz9*Ki-f9fms(cMXo)?#t=nLg6OEJq9-qE)%W@&I-33?tQpcxRY=f;JV=k;55JV zU;hu+k9c~}o0k~6em*D#00$%gw#=uR7ON5&TmkC!0_YB-txR>F6 z57z>B0d4?J`|n5x7Xg5-_Y85Co3N!jX&hSc<=Ns9_f z4B-Lc2?C#AQdU0GuxPSjuBA*U&nYTcTfQoP<3z)}$%d54hFptbcEM`FQcz~eEiv40 zSTfm=WGS+gQ{wbR^JW^>uU|j8NX{8yZkWiAgIkqTTwKB%CY0m3LrzXswn>5oP6qlBj@Rn7)CD&jnD=R586cig)aF*f{ zVRgPiPJ*%`XUqaO-P$&T0C!V+G1|;>;;SFEm)9B`Qb5jktvmY7PQE;IDOH4j$52CcV1G$ zym=JI?JqW^Ek+(3`4m16na;zLX(7iwZcgQr7R*md!23MYd~W`N#a#M)Gncwx31?oA z%q7pw;LPdsxU_UAZAnfBEc0el9`j907AGW9P7FTp=k(N!1Xe!EoHK7hB72!*TI`8~ zXYX)N{CNu!lIPEz$C017U;)d6o}WM&<^>t7tf;ApTm)PwTrivlu0J}F>xR1k*A90W zt`2S=+#a}h;C939f_oG0b-3+tFT=eEw-v4yt{Tn?w+W7iD}u{~%Y;jXOM#1rn+i7( zE*vftE*Oq}E1q7wBK7H_Df-yOMVr`f#flaA_kH-`hZim^UaViavO1Vs$a;@OANt3~ z*H?v~TKW0i^7H>2zhVD3f0}>rPi~Cvgb8?t(semSf`!W~uoTh6u)3&ZWe!Ff4PAH~W{wHD%H)y5^Om)xJPm&~rx+fX zQ2u}+B8T2sE+}VcoWW1Xid$7ujQPM)49AsX0GE`lHRO~P@cEcL3RW3PXzuVzzp7+y zX-=7ia$k=~@PGI_zn;OvAYgOc^5uXXz{6)HQrXLw7g-dacfV74%a;qqYnfk4(+S?! 
z(liD?w_u(8E>JrxUtTU?iiDq&D}T2eg}*OcfonA1^)UURv|AK<#$#)LqWM=eh$`vTf&M9 zfC%zRO07~_W+4nMD8ll%DqkpG!wJQf4W-I@SqvmunUjl%yk#{ym7al(SgeW}{>ksT z3@2BWV4!}+}pp~d34;o{+SUH3qx!B^IwH7?Z z%I(U+HbYV|7Va^)S#TKc-1>X~jQBz^Pt^e~(&|t90$wP?Q<l8Q^uw-dS|_8$j>Qfsn+HcZ!}=< zVPSL1+MFVquh7@-Q3}&LaidV_#QIu1*TJAefUhe1|x)yzfIj}$}K>h+K z*q9+y;%MoUm$!i7erj4J-9KMnACR`$bnZrB#Lp9oSF!C|4#pN5!Z0D1wGMW700>(% z3c#$qHV4DO6PeoY))=?sZHT}UvZ|=0+#)X_gZZ<5p+1vapB5oZ3S|W}Q?hylTH zvVO5|KhS^W8;kjz4VW3$0bx-kT7%daG~6-)6kus9r(vBV57RPX6;ERTO|FbB12(`9 zpm!fIcr_7MZYg5ZP0m_uX@wG@+@KU0;b{V0CPtUH3%~CbOOhD8jaPdyu4s_F~c_nI<~u}_9&*| zfoUEc;vU8nto*v;B7pU5>ITH@nW58z@-v$*m1i_8Usgg}4fz|Zs`72MrPxxIgZ6xo zdtlZBtRV}Ehqj}VoF+N6yg&%%8M&Tz0w`$vG7GAMLj^^k85l#J`W5X^Uhs^?Yfh(1 zWE7MOz@enbNJG-1#WM}_Z#{ER&E;Hpi|2F$C|CCd%@>Rfo z%j8^aFt(0J;C4PKJEg%CubYlv_;trctK}!RFTZtvEc;POvd>xFKFOGQ|GHrgGo?9^ zn_E<5S&fRU+z2q;vjQR}<7`Nny3xQ+Q`nwu=&15ww@^U!?i?^SEc4LR#_ z3W{i0;S7W&ftC-iCg&De%1eYYOgy<*h}f|$XI%+!jV3dE6N;3nRhg2wNSe5Svm7BYkAhVRw1WJ{nwgwUxl=J1@ZOrPG zUc9EbWW91)M%+Z~8L-zYT^+qmMp0a{cn*2-~EUOH(>S2C%cWeS?nIb^%jVnXzOvBp^ zHT7w!z(0(M3Z6TYxj@FUGNF_R3!bqpZ`oO&Q~N2PdEOap$l0suxdratWw?(QYz_&{ z0goG8Zt~iK4Xes{&Qwe@L~04`$7n5l!nA0LGAN_{XMo3gZ0fKh{(|r}F3gef4US$;6JL)1o+bbd4~~ zNX;Ck;U0~OT9S%EZ!mzn1RMh^@^RQ*!EqZ(H{hp14Ct%Cp^}&hG#tHht5%g`H^*`5 zi)T-oq4-m#%QS>^kZu+vo47LC;9*F!hA?nk5QsF3K@UljL+F)Dn>&Aw*DII2z??95 zzIO=lX?j%j)S!R_ER3b4Mc7)g4FOx@u|YLp58!oTD=GmNVMl35GPuhxDp(05+=y|7 zd6TB9qK%USQgNigB1WS$XBAKhDZFxj0BfjF3gpEXOU_!tKzTz^P%I-O#`9!W0n~xu ziw+8yWJnl1oYDemPMbRq1pvgnvXZqZ0z0yt@^Z`Cl^74X0y_ueAX>1Fz(6eJ1XAz} zt6;!lijU=uDr})4zfxFDbHheVir%n-aEB?DHar*-MFndt$epT#F-ubipeAEA1)Xm#_fQ3hV^}PClhFw8-a?NB>G4*Mp4eNyz$bFx~VYO-Dq|A5S-p(v{NwV_iWR zPg{o}GZZU;;KG>rk+xr6f@5F-|1b~-p1jZWz%?a?(hb&|twtCJnvR>F3=r~}1iZTN$6 z-K%;zYjBFo#uLcOKqU;=Qd%oKGZAG$)0>RJpraP7Dk~{3!M?<6hv{yQdGe%(>4^)# zc!>68QWxmQb4ObPU!CFCL*ri8xW8xN1JIty1AYqlLQJG9cj}rd1B^ED9%kQ zEhtVZLE9tV8cSKRr3mX7ZAkI+@l~n){D*0V2Lx(^M(Fe-@3=E~)abiH?jCbb=-6@N z4foy`7Czzr2O=JPD01SY$qz?Oc_e!3wCRt|h?!}Oo%L8;d_rPUvT635l(~;TG0!}I 
zLF&RqX^X*rn3?tD(x+hZW<}1*Rk@bD)%gX5Yl_wumy|wTR?Z9S)^FJO%)e}^*j&jy zgq^P?GAb&HfRTT7n|)I0A_XcW<|NE5M=fb9T~an1ga!<>RLqrh5*}QWm2_U7yq~t# zeIxIypOt-Uvh3FlQ^F5lVc+9_s`yJ}pBpFpb$|JqeYgHr_UqOweuCm_Wk0_CGK*LD zrs6*>`_?$w=SIkW{8^_A&u?VkTA}#!Wk24aynlB|F834J=U!3#wX$EAp!lO@-+H-A zE`Pu5TVIiV?r9}FMR~thdA}~na2%KYy4}kAPh{Vkqxi8(c!=U(`--KDKdSh@QT#2k zU$~ku`KYvlKSH0}V?@->WWS?6p`*n%Rd${u5Pxj-#`iq>tS@yZzvS0VS z@?IwU@#)HYjN*?~!hMwYix*fvbuEhjU$P(nlI(N;qJ*zd-si}^b&~QPtoS#+l-uLH z>|2|a_ut7r_lojfrF<_?!WSvNQSrkSKS=hiJ?G_oJ7u41QT#nh_-nFXSF602EB-RY zpR4#&WxsB`@*XJrT<%R z5SRSqsL9`qjKX{0f_%%nasL(fmgBo>de4sj@b35GDhE_wKf3~K$?+zHp{KZXUn(?X6je zAOBwuJQLXv_q+QnH+Q@}4Egi(GS(f4D}CYB@PBUr{42KQrqq2Bmp^^^<`bVp;C;h4 zZw@;c*LQJj)}`kbpuC3fv*tC$t$+M?3y(jQg#33Gra#{tr@xZXRvS8X7S|c6iaOE~ zXZh*JpGQAB8R>s}=OaOfx3>Sr6FXj=J&QXUvg5?Rorv3c*th)APpQ2| zo?klY&v8#YR;$ak%|!j5UH#?9e~x>y;?k_Agt>Sxe{u7|lW_-D&%7^U3;I+4&Ceb< ze=;s++vXAB3#OocZI$g$e-?MT`1q9n+A?7lSNh-@k!z1z*U_?dpWkS-$Ii@|&$q{Y z@$}>+u3vy*KDFSrwtG6_RyX!sc6q9d zWTHP-G(Z3FN2lU!htF@DvL5w4`q%@Ol+$r5#=kcF@js$|`VAvCA3YssI1&`Pf6M)N zFMfYT)|t40szX2j(;oC+)KA6H=g!2PzdPISjo&(|+T8`t!JRFFoHme6$bRC;9jvdOnZ)K6vz`rGJP={rK0WKJ!JK z;inx{rNxAgTVE@>`9<6>3!a;7TK#=2*ZsfR`woC8lBUsN$q0x9vw{RO3K&3L=_ROO z0u{^w5X3A7%*qnPtQgK5Fo!dr*_FX?>X`%P0OqU+s379Js-7NPR#48n@BiNao~=c7 zbyas)$LTOzGiKr@l1AQ4_j_vIvszQGdce#P$~1D@==|s=GnznsUGOh`B#o?VU+QK1 zGSD8%omCCA?vu`!^{4%{2I}`&%e!Y4_sNmV6U*H?ZQYdf8PT-2@;>nxo2FkT5KI6q znl;#yzfS^t=?)b7gDscDKYF~)yHAGR(3*756Z)5#&8fcf2W0HJ8goV%L;lLHiFRWj zkoA_koL=l`4drQ2arx|9e1}DTe(!afALe#E+@@!Bi&xIrb$g!ReVB7vH|^1qoBaE{ zrrh!(4JZez4OGeu1vf;HsyZ z9p)0EbQ|x#&Np-QUimSr!C@}(v+H_~>%2+X^p#l-io={@^V7v=uJJXw4OhF?@;J<0 z?C|V;z%_ouu$D8plaRmS${Zo}D*xNTgez?W2&6Zv-r@mQ`C(5FIwx0ZbQpeZ)??*| z6u$YkR+k55x~BC0&-KsW^qnv;$@-d`+Ea)#d%IKwj@r?D8voN1Y>H zrK2GIh&|a>wXg8bb1S7i7;JKwOP^y<@!)0NWqs3u`oFnC`4YEg_+IAy7VPh zzbbXKU+t4#5I<{%ZKaF6$773A!}`ggK88-*-0K2=r2U^ZBdwZ3{WUe57=NBm{w!p! 
zAJP!YKj@49mUFzJ&Fn*Qp^%=^{n?@`XZc2_<~}<2UiUDU)~!!wqD zVAqV$gHv0Bezt3EuAbzZt~0pk-5<)sMIG6zbCQ2^t>YkNV`v}6-ep1cPVg0vOf2K@ zt{(6Q2wQvq$sej@?Tv-NRghx`<8=FT@r;^U*Pex4c#<>5L%J~&#%FFZ0?yHW$_ zZ;B6}roTGKdz-%a{fq_HZ$IB%JrD3Lt}ov8sT#DeGSOpR%051D^G8+oo3;>t-e!5* zz5HqS%G#GVI|Kj4 zV%G%FM|mfEW8)os^?>cqt2je{Q_jwsHFztp>M^pNyax17uJ84vA)9&o>*H_tsfg_v zb0lx{2HvH^_Kaz+^?@H~TPJEQe{Rs9^O}1?e^WMG=Nq$%Z`C2Smq`Waubj=B1{Id` zZ8CN{?z6`JG}d{7=R$to4((R&xE4U0nOyEYhbIsH`W$!$pE zKW_A(`8-!Yud@LPu8F#MX~R}8;o_yxny8Ggp_Q-+@~{FvcK3_oP}0mJthPGdNg;d>0< zW%v%mw;8_0@J)tqFnpciYYbmyIECRWB352z_!7ex8NR^qd4|t1e3s!e44-EBFNRMs ze3Ic44FAdSafXjEe3aoMBIeA*_D?R!_K6HD80HvGHf4N<6%2C>Cz~)n!wQBuhLepM zpJ4^V9K*>a7@uJU!yLoOMvTv}f?nr#@flVy%rTs- z$M_5@80HvG)@6K#6%2C>C+jdi!wQBuhLg1!pJ4^V@HKF{Z%CFgKEn!zIfj$97@uJU z!yLoOaG!>wU&*k7VVE<-eMwmx3r=RwadWLnCnFImm1 z0-bEqsA_i-m6baAOxs4tPdjhei==LC7UI&w1L*9Rhd%_7ist*S_ObH?I_=ZSz9GcV zC83N@P(7d%m(RS_n}oS6+!y@C8R+zs_|{>>B`JoNwTJa5w`kS$q&~#sL%s4dd%=2` zQ|K&n>Pxz*VqN-$`2oEnZ{O;Ev`_tn07gXi2KgU<2$*bep9Y49WaC(bvGY+cNeVpxRl4)gCodT8?8a(Ofdfu z+iFxFO3Vh;e&68>tmohriJ<#K$(;EU!mPEizEU*`uLQ8^7|+Uw!{8puV3!vL~<~5z|=nas9#*Iq_t6GS($?&v4PnC66V>0XmN zw(508UK42fl4~QBd!J`(?pLfry6HP{2*Bb766zkW` zZpid#awP1^+vR9m zt>Vb1j566*yI}ogAGtj&j#v)mpVqI2`-eIcuU&{Et6o-an$Q8(|J;;nDYYjN*Mr#` zJ15!!T|X&h>LlWI`p$q?N0A@mdE?0>ay(;8vq}+IAD+AKdBu}zzq^bsHvr#il36}W zj3;OJcM4ou9{Z2xdg1Jx(_2UKt@fShmbePmPu#K=7ut>F!|bOI{8Rzg8)>sHzmMcE zj#-g1`?C+wGs06ANAlxm>_552!58THmr|NW@=3dhwqEmAK&PL*a(@Khq1w8khE;4( zzRRUaBY5-HZsQZpF@ET%b2Ucro0pDUYNwCu(}7h_og2F4$NCQChdjM<_o)HOr)c+o9l`5t zESbA81L^T+I?VyvGX0vcMah!V~l;1wp9T-$i(^6 zU|z>^wf9ah)X%_H%X2V4HL{#h&nie?>~wzWApUMnlB44}SPyfHwk^6ekdJG-`(;K1 zuJ`<_29+Dg&;6(@b#yVT@4075_Q3=AO@B<;&~`^Dpxs74+1Q`o)y-7L#1YoJoLl>K z>HYXI)3#0ATp8>4RO?{3etfstH@A#!j_bjT;pLTm`2q6GD+)Jk51%y`PlodoTw`y| zedPeOfe>3VoF5dP;#~E2T<>$^>UZkHe_6iQ*CG$svt`RXSP{lgIN-PT;6@Li-LA}d z63S=x>aLm*(*|h&?e*(~@}-acv9WwZY%j&{HzRxVi|^@tba{^bA^XI*!y){`Jpo5M z9ftKZ7bL5o8^Wj6nLjLZLTR8o_t@7um_OIB&7UiV!G4Lmc-?<~5dZF6lN0MqF@D6u z^ZR=7n?{pYpJ+xtf 
zJM1Sp8@rZSU3kl?2D*0~Q9u7#U1K}*gTCCTKgk#O6AITb^Gege)ZTHH#}XjeG^S9eemTY>f1U@&%yp5RQH-i3;s&u z_eRG$VfsO*Vx7JDx(9SEW>!JI;--V22Y=vzM@A!CxPjYL2_r`oI57(reRT=(E&FL;Z zu9QJq(&m5i4Cq%r#_meMZ+~ zw6lm6P9jdO!)QlF*AlU^rii&3jIPG$sv=Id7qP;Q(N!32!|2K)R#p-*SCP@>8C_1q z$z?>WD9va~Mq4ntl!%pPBIZgm+Jw=@B2G3EvBHqi`i#~SF{dkHr8c8wjOG|ESc>hD zCt}5CMt@>-j)=Kz5i371`aPpFMXbmWadJAN-!l3&qhE=bdnsb&b4EX7^b-*)9*a2n zA)_BKI!(mNR1tIc7=4G)w;6p?#EKgtPQJ$ItBk%PV&!EKbC(!>fzjthoP1WqiZhJ< zi_xbTeL}>_KSj(PXY^4<9}#gfFJeU!qg9MPBx3HMh?V;py^qm*7`$-6|X*vaVa zjNT?qc@6Ju|dSi>lnS3(W@D~O2k~Eh?OfCy`0fYMXXpN;^ak)UdZU* zM68@IVs0Lz=P){f(QCwhm%LiU3Wk-dSa>4qmkcXbinMYC<1c4?h84?L_)-?muwscw zD;G2VBF1M}v5@f>uyBSIzp?Q7B2J#i_zWxNGX5OKPhfn8m9ts+EEdkNVx~wdXE6SB z#%EYDjq#_laE29ASomZSC&x2B!-`3aAIJEyjL)z#hJ{D7aE291kyb`A{zS%SSTTWx zk7wZwE5?bmaxCMIVSI)aqZxk`W`d$aHm7S6CDSfrIf zjNgm#8CLXU{2nZvVMTWq-c7{GT^XNYMHj~J%=n!cpJ8Q379PmL8CC>{w6X)^w`Y8Y z742BKKMQAA;V06{wv69~@flXMX8cx+-%`X1hLyf7+(*P5!_xC#b6p_O$zYb!z032Z zT)Hjpceq3??v#6-J9`IqdV>2Sq%XPmzde0Zh(7LT(5BRN_r)iB`nE2Q`xS174tLjm z&AICR;;Q2QhwH1Yyzd_VaL18xL@T) z>vJ#M>ohsOe8xiD|8YKs+-vuCW1b(1+=lyoF5O7^)?Ig5?^Oq)9g&`)r+DYyKJMjq6GCUu34#CM`x@cggAONq~>X#@Zdz6--!weL-M=V z!DW@t()Jxzi8CU%-w%(z^3nt2*R)PHB6G{P2AN-jzIysv+BXKz}*+|mj&GPINSrNQHHf6b+qNiZXC!@X=)^uzNL z?nBvhGg7ux=bNqO()2CM`+3 zr-x#M4N66+Ykr$ z>+rJ~S4BB8zT3E%i8X0`nl?%-N4~5p_vGdQ+MXjCrjqH9QnWndjm{lO_o?m%#Yp@PY-y@;|`8>{bdzGtHpC_Kl6-Zu% z-=C~}Z;bqX-ZmAZS}wJm2C5`lMAP*WV9G%9}ykQ}A`F zM0^A6UH7)5^z&8;mB_{Ax6XB+gy(DAjW+3(h|`FR3$^ah{(s)jzcMj;Q0?$vm8kxa z?Gh`KwO{HxD7Ml0Yh(w`n$+BCG<4J_+P^0T23eChHTln-h9U-s^4YjbQ_Xjd+@-03rfFnv8h5-ua#rg*~5K4r!=k)=fRo1 z%nhGb#{Q+O(?D5;^q8I%`M4RLmnv-=ZmdEEPP40gSE~WgTuav*RY>j4>np3~(D5DV zW?)Npe5^9Y&h8v5%r z?UB!wXqs$GT&7*SGin0P2TE`EbXy`jtEisakj9TDHg+Vd+OmcBH`4rOD17Wl{d%S2 zcoU?R=RHQ-k;t|aYU<3w{;#z3+Gs~=Bn@-zG7Qhlls&v}*pW)wGwQTzO2_xGW}H3I z+q(SpFeUa6WpZ;Tds45d^PAYNbbdDX39=`B$4@sn-judixNm|zx&PwMrnZ?_9~|E@ z*`7?QAC_3+C>`%E6ircoVDIKywS*UvwZYM;mqh97_9-P5@VxOzhHX 
zytRow$`228s!BQ?KXmwcbz0ty9XnSgneC6hn(9mSt=xGE@GB*kG_;`ocVU+mrTxv&$vR@Nbs5)L47k{xI;*UrsH}yWROG|7S(54r`C#eJ6J;rB`WQW$1pq{52{+{%YOQ{K1XJIp;&$ zAiw@K!HN$*y3^u%IXrLTUR=9i#k=gNH1yUoXQWqLUune;4K9Duf0Gx|?QRUQ;tz1= z-#9dBgtY7?vEt32_L}bWJLQkMX>G;V4|izzbREsF>aDkye4Dw;nhcntK=~`TcuU@G zv$Nn4=ZSRG?b(*R^YO^io@XqPu5qWAB|r1fwh~=(vsFM}x>MJZ&zv`FO>OJO$RB-I zu;4G%xfs#pJ;vuXZ=)2uD{bqVp+Yc;b$zVZFH=Dg#JjuXfCs)Y2$`@A{-vXbY7 zoTIf%UQLiFGOdSkmj%erIWX#nZQDe5=iFZ^k!q!Sv5Q<;{4-k?MIm9<)9? zKAUaE|F-^G!}4Wlf7tnKh#7xQ@5PqH0NP%So)a^^YUGnoJ9pN>`26#dX1w;q=AVZ* zrtc2a+R|(!~gH1|_uHG1bL7HI1m)w-rP-{NzzcugkM*N}p?HTKLG)KPP zgI-3wK|sCJtFu}mo%TR5jStcI`=9fAyw)F{ddVZH{=;7M(&L}@ zUiJRTPMTlMmx3<8pg~oGvPbCnOMAIemmhk`JSX)MjX(Dl(d8SLpVNNZIy#>yUZ?8t zcV?}$FR6>?+1#twQ98V7tGRb&m#QJ%{SDFKqc3hA_ge@pzwB+QHlLVb-)!qh+JDx+ zjnd|QpO12z)1JyVPq)_QTV(g@zpHi)jQ=2=m+{+b^?&7gi`M`6cfDkMiJ$?xkD_V+ zHqQ{Wc!z4kHW+(Seb;BK)Zz_Q0rL-(r{&kn?4`w*GC%Q2)`GV8f=q$qAKF#!$Dg&s z^lh?Ma(t6%_j=ygN5|L3EW+^(s|U5rnNRy`_ZPg7wAaV;aAm6oC?E7v_>vTK@8!Wd z&UAk3{)&7_YPWHhfhkY*3wRxsmvmR&(K{`d)`#~S{&P~B3%dq14y5Dz(HrtP$FuhZ6qjHn>_x7@RM64wK0`}TcL zJ|sOa=TZ4g4O%~G@A>yhV;}G6ePLr;)Mx$&AuDNIe9k4u$#~zw$v={;q!#gZN>YmI zm-aC#Gim(<$5A(*Q(BqLXCxVq+8KAGKW)FVIl{Z7c8^92`jyLI{Ea!}UDAicQ)-$S z;r#$t|5H?Y(#A*6ryG8%jdaBOsJBTI?<56wv7z-<_CwU0q?BWq&MetS%VYLYc$E}0 zX{ivgpZ4$Uk5Mm^2HkZt%XOydUCb7qCskWLZT*u$RR6@BsAoxwpFCQYev9@8_&nB= zq*l-0ba6_i^P5d>)Z?VHD~BGx_`(|VKb0#yOxil~xo4j+Iv$)p3lEYU%I#Y2a*~$s z)aR)CN$1yNav zp#kmbe58br1SK8HelY2ND6J10K{%4M-n_+)ect6iGwcH2Yz?3A<5Wu zR)-FI===gIe08Pw{}GZBb)3BE6uMyju`%wwcTca`!ha91@6oThSDkCTDEamMf(G?+ zH#IGtPWGMKzobU+QhBg)l>3s}>7<82w+^uhLsgTa20yd%N+*N=8da|1ngrE|Nmf^O zbV(;+yCc^=7(P>e@59|110vE%Rr$^bsdHwiPA!cr+iY?=5#n~WN{SDb$7Y^5?Y<(N zGzfMsJKATCyyVoJft~lH6QkfeE$ggbB>&iASgpBd)5%c32c_>+PLQv@bj9V>!*tSq z+@ICTG*-wfT=}xSPi{IHcew5G*GmVe+I2I(sAchvG+OH2$n4Jrs&*e{A}z0`W>0RZ=7sV>KxUz86i`e=w*=kaV?`GPtKGt zxHDmnuXP4_y>9;eiJM~NZXuC_$GT;ZVY(;xPTa9rPS)@8JKrvYtj)?w3Oq7Res}zX zux0}?$j#@9#q|!y$fJ9|zn>bHK_-}x8I4;nQQg01{eJn13=(L*b^9~(czMFjTNk7D 
zXOK1J-LBm+@=%q~ezbnlwB4CI+)U6+P&RjSRCzgs!C*Pk6TN%iJ! zyT9o^U3F$tW7iEsGhsdL$-ij4K-K4l?x1ZmGKorOv3%xlD^wp>KCXXzLne9fKKNS0 zN0F+>8!e7G9?K-hYV>f;7FMWi>zlV;e=n09T9IDL@YXbWMdcQqW}h-iW=_W(yNe4| zHS2t=VpKYdjGpUMv0r9S)xwSbH{UyFkws&AJ-mA|TID!3ILM%F7P+8&t21_~kGvCC z>9t4yEHZLh=%rTkm&q^6kL+0;pGDF`685e+GF*P|%JuY`YqChkq)VA0XBNw=)ogh6 z7N12Pc)r@c&})L+YU|f@8rT1^O^6-(@IOa-Ty5{UL(NguKw@9Z_!-0waWtezUEz~RsHKdSsmXm zx-HzNvLeVDtwP>i*Gtn_H0l zT$89iD-u7Dz|dV5_3tiK@wZAunkRoC#wCRCj#G!J>Xr|a?@IkZHcyfFZJabf6*s?K z&II8D`CuCFP*OfcHKNz6XOY$)$>On7;#&-kR*i^W=d*--Bu%b{4ZOKALG@R-XUCVl zK9VbgoI7=~Pf(E__x$=PKa$#|Wk&n^ERs*wdO6&3)kjiyvCf-OR&gqqBTibck9;Io z+myc4+I*!vr0s=xTv z&R;CQomkWJ@s?~-wN-q_ch48dXL#=^?Q$WTY<~FMrRUWMRl`wMi?_YYCIc5fTy1f7 zlKg})r$jf)9Af9txz3nt%T|Udr|21yW%r3IuV=Z+H9%3)um2#-i3dL@#Q15nKDsJbVfT;{^?kcz!IsS$*|=y9h%&jsTx03zonyo9$7YajF+ownJQzd ziNWN$d1Tb2gRb&Pyb>|doHD)Y#*)YsFGRGy=nyQ_gXrx-onp$q+6@9e&^QBQPo)W-m-bs zFJyt`*yw6|7pgvx=}+tUe<8EaZc-&FYYh3we7Ha_d*a{0o816*r42*hia z3BPXDJh`IT;BF)R1u|{b4afUe7RxP@mTk=&ClJF1dKb+PEmz%0SP^|>gFq_JTbJ3v zXR`d=(Al+)UJ=OCF<}cvybO_x&wn>B38VFJ{MNRF3$72~IWD>=2|I^Rdm42kkno8* z*9``2A*0$J9P<0FSTgI+7~wE~p1f`J>5FF_`g^!+j05T6eFYDu1Hj?u!3$vIqf^qq zTpFX(n4Fmy%wv|R>EtW+VRTx7^S7IE?8!mG-;A9yxHlb^jA&+D+|@Dx%qcea{6eT!QX!hBxO@w zIYkMvI55x9w{Kqq1O3qv5u+j^3fN@;;}Gn}t)0Mhz>rZLz}gSm9)a*cFzMzO(jR`p z>-+UL`S_z);I$4OKEeZj+dC4>Q$$2Y&@hn}li}LH&t-$d!#szK_TU0LG;9CWY6T?l z|GBf_H2+lxdmQ3FO-^C74_N5<-oH5R+kXY+qq(pDw7BB^Q%@m=`}$AD74M%E-{$}G z*2m(1+p2y4r+a`_bpAiBVmhe>5yxnK{U_5Z-aje+x26BPAwL=cn3E;^lX67rtKW<( zW`UWQrhM^A`&G(}`|1~cXt@raFwrrwag*XFPnkMx`iz;gW+%*yO=g_U_w%;NT(E;Uqr!$kAiR|2%Q>)L*C1 zoIQ8`!o^FMucTbPcKyc9Tet7ry_cGH|G~pYkDok!_WZ@mSFhi^O@Eh>nf3m|$LyR> zxu5gC2>;mdLeYj5ie-4=zg_2{vaiEf27|hK5Jl1P|-o7jW~ikYSNv@CkV>g9lSD6o~*ZM}xjLLk81;4v`Fl z5mSiEK|BuRIgmpc4)|OzFwiq1xDPc1)(aMh)UNLs2p{7N;KcPAhO0%m$QMlkbG`bG zi2zfm9GDdj!3rJ-)_o~oB)Q>3!N3?eT#vc{2830C>sN|lIZ83u2lopMutyz070?J2 z1N}fvC^5)G*`W%cCZIx~UZ8rQmY}kr&Y(LgmZ` z7U8L!XsgzU+N@=^BON4L@M!-F`H_RsjBWT}w9N$O@~Cw$v;&HKbYPS|JPhqW+jJAT 
z!+Y3tLt|`c%Vv0wIz}Fn9YGcwjAacAYiwj=<7mSiSieDoFJRgctji-~AY-5%Ii!b( zhH-3|l{l~_M6JD5!z{qQG}xO7Mq^rFX3N%LxNQxL5+S7>JPeF|p)ElS9nM10_Gtu| zQxc7K<=enQk&%)}-yx%Hz=}MzB&lX63uPqDhmQcmn34s0sd!K^j$ro{{&SAd@g2eb zZ8gUs)xf-GSSXfVwB*S-LI-h#&f*A0q>KAa0Icu)FA0DjDjh1`h-+FMTeJp;1}~{y z)P87Gj~Oupxlz!8(U26z9|hKea9~pFs(=ThsjGZxn{e1=P&qY|NX%S6+6|*dJGt(* za7oo2AQ%AB?hd=J?hvaxY`(ffN9_(}>h9oFFA~#-(GPaY=v>ezSTqXkP{+AG8r&r| zBgRC8HFXFDzoA2@At0`sLue25baSMLb8u`l9BC8+bFzT(FUU8sK>+Kr5H8ZtFDQ$K zzj`^0kCrI`agrr3^cz9t%3%OFgw}Qlb%)Owps8lj;u>t( z9ER71q^Yq@XjAxvLIi%~pcce)2!)7XmOOY6lNmm6$Oz_TwrG(8IW)f&Jdm-_#1~8p zlBA>wwl2;SB6B#Z0T$t)Asiwa8^PWc63`3;d8|u^;q-4P#+Pj1YD8ypC;=}u(Z4_W zXE)e!p6|(NLv*KRT3q#3TAbx?blLSyg7Aqsy2nppSfbbFDX6SOYw{<5`9A--@O^%D zna6k2x6&@izo?<#BHe=g<7)gzCLUZi0J@^|A7%A@zHOQB^FRE6A6Twnd`Cu0 zCu`Kh*&YSstJl-fo*vvy0QK}W^}7;Wkgpye%m zpVDaGxYHsZWe@#Tct5WS-yre?Kv!5lvnd63@d0Wtkx?1^b1f14PG9MK26%pbE9&RJ z6}1Q~2*ZX!!x$sHyeDfN7CbWSoA5*}b{g!#3^;R97}91)Xym{!AN;Bj4!{FoWBIFb z@d;lgn!^gaEi8XQ#tcKf;Bx^GsWn*P_uxv)e83VYHBZbvM{Y-P9or1fhs`KYG)=q) zdF(_QV{qJ148ikeYI2g}%2Vm~VWRwXS^E(K0|(>hOSqoe0g^>emID0pnradWW_$-r z$#R3W0|(P3a#(1y(Y;~A2&*Shp6;z$YJ=(IBH4{(zQet7i4LE~kZeG9)b214zkv~^ z9iV1wxi?bG+HxQC=UZeBMT%BLZ5g#H4NIm0VWGZ*dk>>F&tWZyI;fSnxmp zSVC4;j}5ylDYiYv7F&xM+&qBsJB$bw17S^xG`3a%EOohkA~zT;$41~mzXzNRd&8$F zMtsvj)@pacl{&`c=4*EX%j#b6Wr~0yZP0$JCm4C>Y^kQAX;?3R`0lV84c!P<-1Rvx z{DuNkrM;{**e7MCN1@-?L7C`RRbXqU**$e=TK=damxp%{9+<;C4E1Z)2K@wD2drNA z3-v;?*surW?tty`!J%zo>x@c!aF;3FydM|<6$7~QG=%EJeSqC}SXi^K_E9 z!N6R;WpE#hh|!7bRCtU|@Y537f4#vX?XV$ZICDyQf$dw|0>x@~6fOAD{;@{ZE;4M` z7%-zww}W8Rm^NtGFfJ8S59rr>0Bd0{q`eCCa_AzbF|-4R68nz_*MOEvr2MEgc5LM}1*AH0u9%Km?;e4% zTH`h(Ozh5_x2&~So8X9iZJVjnpzA?ePqeLZL_m2C7YqTJ4YKyZ{f39-&lh+Z2l+M| z9O~Cctg(FS?98gI`cv$e|5pwCs)1iM@T&%X)xfVB_*DbHYT#E5{HlRpHSntje$~ML ze>I@I;zujR_#PgI_x$*IOJe{7hA|9jyro872>^+Q>0(^OB>^y9?A91o3c#EJhF1Z=@CE=# zdobFI(Qw#Be32ZNS*K zKezCtWA+AM3!t|E#yMgH;BtUl11=BP5^x2;cbda@z5(wBTp93uz}A4B0NVi0ZstkH zJqOqp=pb+S<~ZQifbr!)IpDE?YXklaxH@2bsi7=jt#dGriUBZxCF3|mR|)(&qL*8s 
z=AQ|EEI+zgYW~@3{sc8Y%nAAObJhIw)ciP);(UnZ`N{4JF^lWWigaQMHbuJP=J0>F zT+uIIi+UIR^6&rM<@!6?71;%F2!kuK3)oQh|0{p~mv@C{`(SR9=@_hq2_z_HOiUHl zF6&c8&#HAIj$WCDQzECBsHAu!$RAmAYmJS_{^oD=7coe|@SbAIdocOEAb(-?qt)fP z$E%;M?pAdA6Iam~?O@_Z-o};wHoTXF5DzXTlnchvy~akMfAIt=ZsI)98Pv_eZ}Xl4&?? z<}^!dqdO0m5*kkWy=s22Qix)Hr6AndaQ(ofK0|k!D8w*9Fj0JL=w-a9+=t)th!~be zw8fYHMN-R>TeZQ1+q&6<+XVoY+Qe|_FTBnt{oYn!DWnqv9r%tn0H_z(s(w0H*+s10(~e z0QLjy2G{|x1z;n??7lmI9{8Xy9oFF<#I0018VIe;5LeSqo!mH-9-hOju2 z`ZD%a^yB_R3d3UI#!%}s)DC!YU8MjJpukGN(Xdv>^&qaLYyfBzL1c&mOCeYV;YNvz zfcl(*b=nIPF33YZ`{pZ`&ZdI&CKw8m?`fLj1>0KNcJ&h_Bz0Ga}L0)zmJ1egV|8sI3vNr3wRPXP2jdvN6e zTmYH_^Z^(EFbQB9z-E9XfNKCR0F3fHxRL-h0O|m=1qcBc3or{{8^9j`=KwAPWB}-W z0lffR0NMj|0vHM~5?}$qW`Lsrl9R><{Ruj>g640-IpzGSqO2vL>XSTiEzH#hjO+D^ zfN{NU1K1kyTIi3szF!8o3gG#GO#xd1ADhq^Fz)9+$BFaT4B+EAK^)-9fYTt{9Pk~$ zHh^yc#{F*=VC*xm09yeb5AwJ_cmy;K`Vm0mK4TzY>_?%1aWCE-Fb=Z-z_x(>0ows? z4cH#A4`2tt3cy&*`hb~JntA83vLPgcz3oPJCb5ze9i&_ouMG_dkoD za16#i0H`QP3aI;~FfLK1$oHSp{p$Z@kndM4GtT$>A1j&fx6d+{{K^H3N=tqlRZ=<8 zufzXUVOD+guX=s)I?yP$`%X42&+}zG<)svAA&t}x98y8*GySiDK&Dq<`9{zd!llRLFDVZN%-fubf zw+qd}&YqlOx_(t?tHU&@=_b@~m59#eB}hj0EZJF^3z;JX~}kSFZ|&AM$NWX-g#j@v9~+%Ky|Q*bMW-BBsj3kk%>Dy zj5j~ji(G$Gccxy_+(}ls-ozd($~Ib8?M2kanqm8qB9Vp=TR&*fcja%W0sg9r5-unTY3KSDV@9P zZ_sTp_GO$Sa8mCIfcxWx0DAyZ05Sne!E?b0pe?{404Yvm;B^5Q1pxbZZa2UcfD8aL zkgE&O24En7U%^GyyPdy}6BmVIR#U1IB&W4ZwKMnGP6GzQeUp zf)=MV0LTxd9xb}}w^3U+iF`2}*J1^o7_KNH+`owMpd!K}iU?Oic;Y)5w-GmY%=whj z<~CyGFqiHCw3$S~$59#rEnVh7u!3lFeX(+w+vo)}RuXe&zS?vhD$!cFNmO&y!H*$9 z09u?4`~~MDqW|O`A5Yg86`vvl2kpB!pQ0r}O=p^aBF}^*lU0+A0)^!5AO( ztOyVRbPJ@LH*anZba#;Hh%{h3;Kc)WNBY0z+}+)s;o4k=^$YrdibSoqd|xY8W;}}E zplupL+=89w`_ca~7xWzoc?SO-or}uU&CSga(hG;Q6h+DW8{v??G1S$!7XtrA;%aF? 
zJ_E5$)q?)97sg^aNOv*lkH=;IQ3v_hKzoe)mow3VK8ufxA{~x}sivm&9lIw*?b{cQN85+%AuX=%MFOsAg;lgVDi85?&fN+0Z- zF!nTVTnd+l@)`q7D2%%q#z2g|{_LO9@yC7Z3#2_+$Uk=essZSu17ms#&b41d3-^Is z|H9@Os=J=v>i>}L`Qrx0#)87ifa?(`NA1G3{&DDku-@}^m+B_g&@leT8htAPYXXmN zLphH2_aC?aH+o3=`~?0IXK3hGm@d!;V+sog-5UTeSd)kI%+HL~dDW{6OWoXrcoUOC z%N1v2BrIvzQ24D@Edk~sDl^C4URY44jwmmay@)Sg94ojM7xZ{HS{G-yC|OZ4y%KD>D& ze9p-cK4oPI&r?!_oxOVtU-I&VcMl(m`a^ri=;{5YsSngKt`oI1!waMF*`z+dPqnb1 zdZldIBY1or<=Y+c|l@M2=1g-lu zEm{a!&z}pQ-oF>%d62I^wEHJW6R+9|I4A?ok@@Rg?Dx1XFCYX2eXgqYdrAM@{rY{) zLyeE^w14POA>;Am0{nHYThlU0`ePj*iHj>Bvv0@{;bVGwfiO%D)*Nw~dV#*a0L`Ru zD9aS}@?bbVGc?x(#nm7CnAGPG&#X|Pfc~fE%n`Cd*8{_c(>?`jB4I~JNTK?}e3~lh zDPepLfWCbt8vNh15w6A5bwK~m>JRIj0{Tbm=m^+`xAyE2Uf;Px$3fPM7lrDN`A#oe zwx*3oBRBx#!BpjfLtD<8X_nexSb}?yGPtG;Z9Oz9FJ6gZqx3(I4md z-TnI)&;#emL!(CvGb>ii51(7Ls&IVTG~vdM9RkkB`ywI)SU2SJ)$@ku+dg&OisS!T z{nhjR*Zf*qdo^{I%jH(MPlkPpE%cw(ntXVU?9t4>IQ>U6&c8v!eD%AgexP$GKC^IN z0DWngCLi}jnt8{=JbnMzF=2zhf1&NHDPI_kbLp|klZD4;&d|B@&cTCp?SOM?Ve%NJ zg#D?eexQ3#>6`*^X-z)vwTnvcXT$ONi*rn24(kPJR<4nx?+$>mBSrnrly7Kj1k*arOI=9QK*wxJAy+1tEQq!NEk(ikOi_9jHyrm@KXRWwd=RFUE-mUW zlRcSTvZUG#kEj{ca|G!6&3HyG=#Q@medF+K3H1C@TwM#3!}%TO$nQDa`_Q;rT3=v) z?E8%(5CFR4>r8*!;n`AQdi|X+oZoSd)O1+y`MSfL9jm7|{%oVXUh6cdGk$>L^+!JGr=!xl&VS+~;eCNi?1<;rR@nar|sKaDK-*QhlDm zKJ!hz7g8tD`uYPUU5dichXSE5^)HH#;dnlSXB>D=Q0#KRngE}xg*}_FzZWMRY=iIV zCGqfiA^CrV@tgqn+E}N!=fiyyw9j7bv$zJvb!1^{G4*tDtQJRq*x$ZVkM|=#v_p(^ zr~V_^f5OLkrdaxGYv21j`LHviu)h=MXJm1Xgf`1xLljjO7<(&zCVh+p&p=Nc7t;Rc zXVbxW&_DGdtswyU&wlK0Md^*9;v9jael`+l9mI;Y83Q$>@zX{u-=QxPdyy;#&a<{!1RqE>6}Sj=HK#*Yboed zf{=^9+)$&zt460H2kC5U5i7MQ{sL!p82%ah5#n^PnUM-O4Sqa@bTa&|OdTs>5rQ-p zKn+7OkPz4}XIP>o49P- zD?s0;cQ0_xCqN$;;Jg^md2oFu!1GUldn*C@Ki(nXdRl<RBCrk+V4o(y znh4M8>3s#Bb>iI--ihH^C*J4bxh>w;;T;{Wp<#^D`wP6s!@E8_AHpt$_hWdcg!eXh z4~6GtaQ`geJsRFm;avyr=Wy+f_Xv32iuYM~hlA&+xR=1Y5j@kzbvmBC;+Z$z`QVxz z&qDC*70-_G+#b)5@$4ASVR3(gXR>%VfqP!O=fe9u+*3xW z?`-g13D4E>{t55H@J<8IjPR}x?*{PB2JZy%&JXuIc>jlYe(0dr{DS{?>wwJ{w5h-T 
zG1V2!O!dFg-|<B~_Ufu7=8%siJm=o`)1;Mh8P1zEqOP4QiUE4CBUn<`xdZ0E$SHD6D!+ck! zd_xwjQLvJ%oQ@R@@`b6#s=OOZkVPzjsumQclu>n2#PVx(u+01`7a|Tpm0MyAePfxF zwlR%bl(?Z`85%)Xqiu>v9rLu6Ql?ms{w+-ReEsaHh?s#MMUMGA^eB94Qre+~r4}!BHq_<*kA{%*5hN#LtZX40;r-#TeboO)tlUrDq+}d9N;#SeGSJ1gQtrU zL#;H&&-pqhYupXlz*Yj>dX^d?#cvb3~yFo}Zl4MDzq$AP0JO`!6I zjWGks*%U61p_TiH@?wcab%e5o%Lk2xM(i;WnTGUg-&o&->O=eRxB7U*4i5{D?MvlL zpf1L`GHt}AF(_ZYc_*M8ddUq9N$7xnU2Unn70OAjpgiS&^9N;h%*2s`E%G(}Z$vSm zJj&~vn9{LK3nA7nE6FxXMBh3r~B>A`ohDLQ! zhE0OvY+7AYzI+6W&<#2Viev4lxo_r^+Wj#?9Ee-1iGf#qB((gtPQ81E1U4`UOkr<(*L)TuBiL_TLm58VYo`V`-FW#smd%juRSb7Q(6m z)Qm@dGL zvF_I>1$|%Zf%ZDa#L1{|=aR1vom9RGnHYcbq&3I>D=B1E?W-*Hndk}n`Hvkz(AKlC zz?yRw>%gjthB%V?@O09Lfq{7$8?z^Hvr1)37**CxJwG-8cQ(r{X3j z`G0<&Y6F`L1wj7qi7?ipYy{VhEiMmzEVeL1IToQy(jG0{~d?tNwAk53j6KnkmnE2C#ekFOMssJH#5^b zKdTAs&mG`S;C!+3)znWWI|ciBo1e)a&hy=%%&&hsZ>9^J8$O3~iMoGBhnNy2YC*Zt z$V~C-`v-EM!^?P6Q~ToR0OuMdpxihAt-PglAk9m~>6dVpGZo{+8W;#imM)bh(0q(% zDU-xC3k_3^8AI_xCns80cy^Ng?j422@J`yxYuBhB(~Q>BoA|vth=X?yCcxbk%Ho+1 zx)U>J(sL6$J3&|o=Qrs72oI-cIq1jpjy(eg2zVxhd9U&HrSE8?KlkHDVV1Qu)eY(( z6V8VUKP3ajP(P`@r2Mh0QkaBk{(bxAha-*oba>@9>hPJKlZnc?c3A3S>oy{ynp#Jzn-M@p?&b^w19&- zw!OsvQT~U=jT1Hn1mvfS=X`iZi1EPIu<-oK6?*oD=W;uOgY)I2^x@7k{~c~gi5Quz zqs0GF{_1i+wERDE~n|oK9A?Dlg-S&DYqmGbBkOmw}k1l2I^0e z!L#JGFpfdj{4j}E7{+mWAu&hIP;eJ;?i*q#E3oh3ykD5h zeD+N2gIZc2ihBnLbI}l~Y{kWia94w8{OS&$11sS^2h+g!6Ew^EJsSGy^y20%$>Hxs zRAa{q=inKH>w+0@FM~X|`-#K6aNdXepf)hJgPl!FWq3cPg z=auUD73QbEfiy7a4D(a+_v!=IPPuT0cQ6L-$w9~bJ8CHB=%RGSINw_nY3hTr(1yOS z*BJu3N5fu73I0L&{FrTRZKf%YG|V|mipn3urRJ5VMtw*|m_ z&@ax!#7#XOowt$+Tu7FLt8#528*cmI>KtDOh)NcL0LGWUt$ze4hmZ#GFbxDqGxiGy zXQ4`KEg|O%o{35ji+SUHx($E=Apbs{zC{561)VTnV0`4?k3(M+@5W)BjCbQOzX)(=9rb<=>Ih`*FA%$L9;|AL-pVj1>XS zJ?Q!h`jr54OM&}w>0TV~#Bu(`IT`Q7aSsIJQ-JwSfO%MevjMtBhjSFVZ^3;XuG?@Q zC*6_bx&rUW@fm{aTHIIRnhf`N_)Nv;F0Qq4EiB!WOl@fnNjdA!%AdmMP);r;4i5t!FV!R zrOtiA`*bRgG+aZ$^@T>|cg3mN55LK(t@)^ zNoy$yXyby;Oel+yOr4m7Wq~A&ECqW}KL7ryxs_B;S~^ysI;#~e)`>6~M+*}pZIiO} 
zrcsb8+(uGC14|m!)CiYIJ28K_KP|$Uqkv(NS7?GztyJlXQr!RYIt3TmHURneDTS|7 zK;o-Y+XLjE@nP6kiNbwq@x(neJYO}}u|=gr;}-KAy|6SeA3TTF%&#a~y#&S4TQePX z9W;5xq48`}(xEu=g~|M#_;}W)dCpec_=&JalN|abU8MLwsy`{NgfYH){b4#%nuTHY z^AcR!!QSM9l(&Sxr-P>cD2rzcn&%4NO9S=7eLCLRsmq~X(m}%Ne5pL@a#EOtrFg}~ zQalM4H=ZP?t{?idA)z#Du#pMx3@hO_5`lyFiPb?0N13#8@O`ofs1V#N<%sVWf1PL8 z1e1TFAmvK2*mrQBBkg-&zk_=lXTMN|!J-ErGO@o+&u0 z6N_MCYHE!6NdhRZt7rH%Kubp-;!+i98cY|AA~{S-@*_t1e>%_NDo&4cauQc_X!#c` z@>0rx+WnPrHa3h0)3G+AfCp2u4rc~oxC%szzq*Vvp!R;hF(_7G%Kk inputdir= ... --- - - -cfg = { inputs = { } } -for _, a in ipairs(arg) do - local var, val = a :match "^(.-)=(.*)" - if var then cfg[var] = val else table.insert (cfg.inputs, a) end -end - --- metalua.mlc doesn't exist yet; this preload manager loads a mockup which is just --- sufficient to compile the real mlc.mlua -package.preload['metalua.mlc'] = function() - - print "Loading fake metalua.mlc module for compiler bootstrapping" - - mlc = { } - mlc.metabugs = false - - function mlc.function_of_ast (ast) - local proto = bytecode.metalua_compile (ast) - local dump = bytecode.dump_string (proto) - local func = string.undump(dump) - return func - end - - function mlc.ast_of_luastring (src) - local lx = mlp.lexer:newstream (src) - local ast = mlp.chunk (lx) - return ast - end - - function mlc.function_of_luastring (src) - local ast = mlc.ast_of_luastring (src) - local func = mlc.function_of_ast(ast) - return func - end - - function mlc.function_of_luafile (name) - local f = io.open(name, 'r') - local src = f:read '*a' - f:close() - return mlc.function_of_luastring (src, "@"..name) - end - - -- don't let require() fork a separate process for *.mlua compilations. - package.metalua_nopopen = true -end - -require 'verbose_require' -require 'metalua.base' -require 'metalua.bytecode' -require 'metalua.mlp' -require 'metalua.package2' - -local function compile_file (src_filename) - print("Compiling "..src_filename.."... 
") - local src_file = io.open (src_filename, 'r') - local src = src_file:read '*a'; src_file:close() - local ast = mlc.ast_of_luastring (src) - local proto = bytecode.metalua_compile (ast, '@'..src_filename) - local dump = bytecode.dump_string (proto) - local dst_filename = cfg.output or error "no output file name specified" - local dst_file = io.open (dst_filename, 'wb') - dst_file:write(dump) - dst_file:close() - print("...Wrote "..dst_filename) -end - -if cfg.inputdir then - local sep = package.config:sub(1,1) - if not cfg.inputdir :match (sep..'$') then cfg.inputdir = cfg.inputdir..sep end -else - cfg.inputdir="" -end - -for _, x in ipairs (cfg.inputs) do compile_file (cfg.inputdir..x) end - diff --git a/src/build-utils/precompile.lua b/src/build-utils/precompile.lua deleted file mode 100644 index bf8aef9..0000000 --- a/src/build-utils/precompile.lua +++ /dev/null @@ -1,37 +0,0 @@ --- Compile all files called *.mluam in a directory and its sub-directories, --- into their *.luac counterpart. --- --- This script is windows-only, Unices have half-decent shell script languages --- which let you do the same with a find and an xargs. - -cfg = { } -for _, a in ipairs(arg) do - local var, val = a :match "^(.-)=(.*)" - if var then cfg[var] = val end -end - -if not cfg.command or not cfg.directory then - error ("Usage: "..arg[0].." command= directory=") -end - --- List all files, recursively, from newest to oldest -local f = io.popen ("dir /S /b /o-D " .. cfg.directory) - -local file_seen = { } -for src in f:lines() do - file_seen[src] = true - local base = src:match "^(.+)%.mlua$" - if base then - local target = base..".luac" - if file_seen[target] then - -- the target file has been listed before the source ==> it's newer - print ("("..target.." up-to-date)") - else - local cmd = cfg.command.." "..src.." 
-o "..target - print (cmd) - os.execute (cmd) - end - end -end - - diff --git a/src/compiler/Makefile b/src/compiler/Makefile deleted file mode 100644 index 2d9d88c..0000000 --- a/src/compiler/Makefile +++ /dev/null @@ -1,67 +0,0 @@ -include ../config - -all: $(LIBRARIES) install metalua - -$(PLATFORM): all - -LUA_RUN = ../$(LUA_VM_DIR)/$(RUN) -LUA_COMPILE = ../$(LUA_VM_DIR)/$(COMPILE) - -LIBRARIES = \ - bytecode.luac \ - mlp.luac \ - mlc.luac - -# Library which compiles an AST into a bytecode string. -BYTECODE_LUA = \ - lopcodes.lua \ - lcode.lua \ - ldump.lua \ - compile.lua - -# Library which compiles source strings into AST -MLP_LUA = \ - lexer.lua \ - gg.lua \ - mlp_lexer.lua \ - mlp_misc.lua \ - mlp_table.lua \ - mlp_meta.lua \ - mlp_expr.lua \ - mlp_stat.lua \ - mlp_ext.lua - -metalua.luac: mlc.luac - -bytecode.luac: $(BYTECODE_LUA) - $(LUA_COMPILE) -o $@ $^ - -mlp.luac: $(MLP_LUA) - $(LUA_COMPILE) -o $@ $^ - -# Plain lua files compilation -%.luac: %.mlua bootstrap.lua mlp.luac bytecode.luac - $(LUA_RUN) bootstrap.lua $< - -# FIXME what's this?! some old stuff from when metalua files hadn't their own -# extensions? 
-# Metalua files compilation through the bootstrap compiler -%.luac: %.lua - $(LUA_COMPILE) -o $@ bootstrap $< - -# Compiler/interpreter -metalua: metalua.luac install-lib - $(LUA_RUN) metalua.luac --verbose --sharpbang '#!$(TARGET_BIN_PATH)/lua' --output metalua --file metalua.mlua - -install-lib: $(LIBRARIES) - mkdir -p $(TARGET_LUA_PATH)/metalua - cp $(LIBRARIES) $(TARGET_LUA_PATH)/metalua/ - -install: install-lib metalua - mkdir -p $(TARGET_BIN_PATH) - cp metalua $(TARGET_BIN_PATH)/ - -.PHONY: all install - -clean: - -rm *.luac metalua diff --git a/src/compiler/lexer.lua b/src/compiler/lexer.lua deleted file mode 100644 index 8b19804..0000000 --- a/src/compiler/lexer.lua +++ /dev/null @@ -1,510 +0,0 @@ ----------------------------------------------------------------------- --- Metalua: $Id: mll.lua,v 1.3 2006/11/15 09:07:50 fab13n Exp $ --- --- Summary: generic Lua-style lexer definition. You need this plus --- some keyword additions to create the complete Lua lexer, --- as is done in mlp_lexer.lua. --- --- TODO: --- --- * Make it easy to define new flavors of strings. Replacing the --- lexer.patterns.long_string regexp by an extensible list, with --- customizable token tag, would probably be enough. Maybe add: --- + an index of capture for the regexp, that would specify --- which capture holds the content of the string-like token --- + a token tag --- + or a string->string transformer function. --- --- * There are some _G.table to prevent a namespace clash which has --- now disappered. remove them. ----------------------------------------------------------------------- --- --- Copyright (c) 2006, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. 
--- ----------------------------------------------------------------------- - -module ("lexer", package.seeall) - -require 'metalua.runtime' - - -lexer = { alpha={ }, sym={ } } -lexer.__index=lexer - -local debugf = function() end ---local debugf=printf - ----------------------------------------------------------------------- --- Patterns used by [lexer:extract] to decompose the raw string into --- correctly tagged tokens. ----------------------------------------------------------------------- -lexer.patterns = { - spaces = "^[ \r\n\t]*()", - short_comment = "^%-%-([^\n]*)()\n", - final_short_comment = "^%-%-([^\n]*)()$", - long_comment = "^%-%-%[(=*)%[\n?(.-)%]%1%]()", - long_string = "^%[(=*)%[\n?(.-)%]%1%]()", - number_mantissa = { "^%d+%.?%d*()", "^%d*%.%d+()" }, - number_exponant = "^[eE][%+%-]?%d+()", - number_hex = "^0[xX]%x+()", - word = "^([%a_][%w_]*)()" -} - ----------------------------------------------------------------------- --- unescape a whole string, applying [unesc_digits] and --- [unesc_letter] as many times as required. ----------------------------------------------------------------------- -local function unescape_string (s) - - -- Turn the digits of an escape sequence into the corresponding - -- character, e.g. [unesc_digits("123") == string.char(123)]. - local function unesc_digits (backslashes, digits) - if #backslashes%2==0 then - -- Even number of backslashes, they escape each other, not the digits. - -- Return them so that unesc_letter() can treaat them - return backslashes..digits - else - -- Remove the odd backslash, which escapes the number sequence. - -- The rest will be returned and parsed by unesc_letter() - backslashes = backslashes :sub (1,-2) - end - local k, j, i = digits:reverse():byte(1, 3) - local z = _G.string.byte "0" - local code = (k or z) + 10*(j or z) + 100*(i or z) - 111*z - if code > 255 then - error ("Illegal escape sequence '\\"..digits.. 
- "' in string: ASCII codes must be in [0..255]") - end - return backslashes .. string.char (code) - end - - -- Take a letter [x], and returns the character represented by the - -- sequence ['\\'..x], e.g. [unesc_letter "n" == "\n"]. - local function unesc_letter(x) - local t = { - a = "\a", b = "\b", f = "\f", - n = "\n", r = "\r", t = "\t", v = "\v", - ["\\"] = "\\", ["'"] = "'", ['"'] = '"', ["\n"] = "\n" } - return t[x] or error([[Unknown escape sequence '\]]..x..[[']]) - end - - return s - :gsub ("(\\+)([0-9][0-9]?[0-9]?)", unesc_digits) - :gsub ("\\(%D)",unesc_letter) -end - -lexer.extractors = { - "skip_whitespaces_and_comments", - "extract_short_string", "extract_word", "extract_number", - "extract_long_string", "extract_symbol" } - -lexer.token_metatable = { --- __tostring = function(a) --- return string.format ("`%s{'%s'}",a.tag, a[1]) --- end -} - -lexer.lineinfo_metatable = { } - ----------------------------------------------------------------------- --- Really extract next token fron the raw string --- (and update the index). --- loc: offset of the position just after spaces and comments --- previous_i: offset in src before extraction began ----------------------------------------------------------------------- -function lexer:extract () - local previous_i = self.i - local loc = self.i - local eof, token - - -- Put line info, comments and metatable around the tag and content - -- provided by extractors, thus returning a complete lexer token. - -- first_line: line # at the beginning of token - -- first_column_offset: char # of the last '\n' before beginning of token - -- i: scans from beginning of prefix spaces/comments to end of token. - local function build_token (tag, content) - assert (tag and content) - local i, first_line, first_column_offset, previous_line_length = - previous_i, self.line, self.column_offset, nil - - -- update self.line and first_line. 
i := indexes of '\n' chars - while true do - i = self.src :find ("\n", i+1, true) - if not i or i>self.i then break end -- no more '\n' until end of token - previous_line_length = i - self.column_offset - if loc and i <= loc then -- '\n' before beginning of token - first_column_offset = i - first_line = first_line+1 - end - self.line = self.line+1 - self.column_offset = i - end - - -- lineinfo entries: [1]=line, [2]=column, [3]=char, [4]=filename - local fli = { first_line, loc-first_column_offset, loc, self.src_name } - local lli = { self.line, self.i-self.column_offset-1, self.i-1, self.src_name } - --Pluto barfes when the metatable is set:( - setmetatable(fli, lexer.lineinfo_metatable) - setmetatable(lli, lexer.lineinfo_metatable) - local a = { tag = tag, lineinfo = { first=fli, last=lli }, content } - if lli[2]==-1 then lli[1], lli[2] = lli[1]-1, previous_line_length-1 end - if #self.attached_comments > 0 then - a.lineinfo.comments = self.attached_comments - fli.comments = self.attached_comments - if self.lineinfo_last then - self.lineinfo_last.comments = self.attached_comments - end - end - self.attached_comments = { } - return setmetatable (a, self.token_metatable) - end -- - - for ext_idx, extractor in ipairs(self.extractors) do - -- printf("method = %s", method) - local tag, content = self [extractor] (self) - -- [loc] is placed just after the leading whitespaces and comments; - -- for this to work, the whitespace extractor *must be* at index 1. - if ext_idx==1 then loc = self.i end - - if tag then - --printf("`%s{ %q }\t%i", tag, content, loc); - return build_token (tag, content) - end - end - - error "None of the lexer extractors returned anything!" 
-end - ----------------------------------------------------------------------- --- skip whites and comments --- FIXME: doesn't take into account: --- - unterminated long comments --- - short comments at last line without a final \n ----------------------------------------------------------------------- -function lexer:skip_whitespaces_and_comments() - local table_insert = _G.table.insert - repeat -- loop as long as a space or comment chunk is found - local _, j - local again = false - local last_comment_content = nil - -- skip spaces - self.i = self.src:match (self.patterns.spaces, self.i) - -- skip a long comment if any - _, last_comment_content, j = - self.src :match (self.patterns.long_comment, self.i) - if j then - table_insert(self.attached_comments, - {last_comment_content, self.i, j, "long"}) - self.i=j; again=true - end - -- skip a short comment if any - last_comment_content, j = self.src:match (self.patterns.short_comment, self.i) - if j then - table_insert(self.attached_comments, - {last_comment_content, self.i, j, "short"}) - self.i=j; again=true - end - if self.i>#self.src then return "Eof", "eof" end - until not again - - if self.src:match (self.patterns.final_short_comment, self.i) then - return "Eof", "eof" end - --assert (not self.src:match(self.patterns.short_comment, self.i)) - --assert (not self.src:match(self.patterns.long_comment, self.i)) - -- --assert (not self.src:match(self.patterns.spaces, self.i)) - return -end - ----------------------------------------------------------------------- --- extract a '...' or "..." 
short string ----------------------------------------------------------------------- -function lexer:extract_short_string() - -- [k] is the first unread char, [self.i] points to [k] in [self.src] - local j, k = self.i, self.src :sub (self.i,self.i) - if k~="'" and k~='"' then return end - local i = self.i + 1 - local j = i - while true do - -- k = opening char: either simple-quote or double-quote - -- i = index of beginning-of-string - -- x = next "interesting" character - -- j = position after interesting char - -- y = char just after x - local x, y - x, j, y = self.src :match ("([\\\r\n"..k.."])()(.?)", j) - if x == '\\' then j=j+1 -- don't parse escaped char - elseif x == k then break -- unescaped end of string - else -- eof or '\r' or '\n' reached before end of string - assert (not x or x=="\r" or x=="\n") - error "Unterminated string" - end - end - self.i = j - - return "String", unescape_string (self.src:sub (i,j-2)) -end - ----------------------------------------------------------------------- --- ----------------------------------------------------------------------- -function lexer:extract_word() - -- Id / keyword - local word, j = self.src:match (self.patterns.word, self.i) - if word then - self.i = j - if self.alpha [word] then return "Keyword", word - else return "Id", word end - end -end - ----------------------------------------------------------------------- --- ----------------------------------------------------------------------- -function lexer:extract_number() - -- Number - local j = self.src:match(self.patterns.number_hex, self.i) - if not j then - j = self.src:match (self.patterns.number_mantissa[1], self.i) or - self.src:match (self.patterns.number_mantissa[2], self.i) - if j then - j = self.src:match (self.patterns.number_exponant, j) or j; - end - end - if not j then return end - -- Number found, interpret with tonumber() and return it - local n = tonumber (self.src:sub (self.i, j-1)) - self.i = j - return "Number", n -end - 
----------------------------------------------------------------------- --- ----------------------------------------------------------------------- -function lexer:extract_long_string() - -- Long string - local _, content, j = self.src:match (self.patterns.long_string, self.i) - if j then self.i = j; return "String", content end -end - ----------------------------------------------------------------------- --- ----------------------------------------------------------------------- -function lexer:extract_symbol() - -- compound symbol - local k = self.src:sub (self.i,self.i) - local symk = self.sym [k] - if not symk then - self.i = self.i + 1 - return "Keyword", k - end - for _, sym in pairs (symk) do - if sym == self.src:sub (self.i, self.i + #sym - 1) then - self.i = self.i + #sym; - return "Keyword", sym - end - end - -- single char symbol - self.i = self.i+1 - return "Keyword", k -end - ----------------------------------------------------------------------- --- Add a keyword to the list of keywords recognized by the lexer. ----------------------------------------------------------------------- -function lexer:add (w, ...) - assert(not ..., "lexer:add() takes only one arg, although possibly a table") - if type (w) == "table" then - for _, x in ipairs (w) do self:add (x) end - else - if w:match (self.patterns.word .. "$") then self.alpha [w] = true - elseif w:match "^%p%p+$" then - local k = w:sub(1,1) - local list = self.sym [k] - if not list then list = { }; self.sym [k] = list end - _G.table.insert (list, w) - elseif w:match "^%p$" then return - else error "Invalid keyword" end - end -end - ----------------------------------------------------------------------- --- Return the [n]th next token, without consumming it. --- [n] defaults to 1. If it goes pass the end of the stream, an EOF --- token is returned. 
----------------------------------------------------------------------- -function lexer:peek (n) - if not n then n=1 end - if n > #self.peeked then - for i = #self.peeked+1, n do - self.peeked [i] = self:extract() - end - end - return self.peeked [n] -end - ----------------------------------------------------------------------- --- Return the [n]th next token, removing it as well as the 0..n-1 --- previous tokens. [n] defaults to 1. If it goes pass the end of the --- stream, an EOF token is returned. ----------------------------------------------------------------------- -function lexer:next (n) - n = n or 1 - self:peek (n) - local a - for i=1,n do - a = _G.table.remove (self.peeked, 1) - if a then - --debugf ("lexer:next() ==> %s %s", - -- table.tostring(a), tostring(a)) - end - self.lastline = a.lineinfo.last[1] - end - self.lineinfo_last = a.lineinfo.last - return a or eof_token -end - ----------------------------------------------------------------------- --- Returns an object which saves the stream's current state. ----------------------------------------------------------------------- --- FIXME there are more fields than that to save -function lexer:save () return { self.i; _G.table.cat(self.peeked) } end - ----------------------------------------------------------------------- --- Restore the stream's state, as saved by method [save]. 
----------------------------------------------------------------------- --- FIXME there are more fields than that to restore -function lexer:restore (s) self.i=s[1]; self.peeked=s[2] end - ----------------------------------------------------------------------- --- Resynchronize: cancel any token in self.peeked, by emptying the --- list and resetting the indexes ----------------------------------------------------------------------- -function lexer:sync() - local p1 = self.peeked[1] - if p1 then - li = p1.lineinfo.first - self.line, self.i = li[1], li[3] - self.column_offset = self.i - li[2] - self.peeked = { } - self.attached_comments = p1.lineinfo.first.comments or { } - end -end - ----------------------------------------------------------------------- --- Take the source and offset of an old lexer. ----------------------------------------------------------------------- -function lexer:takeover(old) - self:sync() - self.line, self.column_offset, self.i, self.src, self.attached_comments = - old.line, old.column_offset, old.i, old.src, old.attached_comments - return self -end - --- function lexer:lineinfo() --- if self.peeked[1] then return self.peeked[1].lineinfo.first --- else return { self.line, self.i-self.column_offset, self.i } end --- end - - ----------------------------------------------------------------------- --- Return the current position in the sources. This position is between --- two tokens, and can be within a space / comment area, and therefore --- have a non-null width. :lineinfo_left() returns the beginning of the --- separation area, :lineinfo_right() returns the end of that area. 
--- --- ____ last consummed token ____ first unconsummed token --- / / --- XXXXX YYYYY --- \____ \____ --- :lineinfo_left() :lineinfo_right() ----------------------------------------------------------------------- -function lexer:lineinfo_right() - return self:peek(1).lineinfo.first -end - -function lexer:lineinfo_left() - return self.lineinfo_last -end - ----------------------------------------------------------------------- --- Create a new lexstream. ----------------------------------------------------------------------- -function lexer:newstream (src_or_stream, name) - name = name or "?" - if type(src_or_stream)=='table' then -- it's a stream - return setmetatable ({ }, self) :takeover (src_or_stream) - elseif type(src_or_stream)=='string' then -- it's a source string - local src = src_or_stream - local stream = { - src_name = name; -- Name of the file - src = src; -- The source, as a single string - peeked = { }; -- Already peeked, but not discarded yet, tokens - i = 1; -- Character offset in src - line = 1; -- Current line number - column_offset = 0; -- distance from beginning of file to last '\n' - attached_comments = { },-- comments accumulator - lineinfo_last = { 1, 1, 1, name } - } - setmetatable (stream, self) - - -- skip initial sharp-bang for unix scripts - -- FIXME: redundant with mlp.chunk() - if src and src :match "^#" then stream.i = src :find "\n" + 1 end - return stream - else - assert(false, ":newstream() takes a source string or a stream, not a ".. - type(src_or_stream)) - end -end - ----------------------------------------------------------------------- --- if there's no ... args, return the token a (whose truth value is --- true) if it's a `Keyword{ }, or nil. If there are ... args, they --- have to be strings. if the token a is a keyword, and it's content --- is one of the ... args, then returns it (it's truth value is --- true). If no a keyword or not in ..., return nil. 
----------------------------------------------------------------------- -function lexer:is_keyword (a, ...) - if not a or a.tag ~= "Keyword" then return false end - local words = {...} - if #words == 0 then return a[1] end - for _, w in ipairs (words) do - if w == a[1] then return w end - end - return false -end - ----------------------------------------------------------------------- --- Cause an error if the next token isn't a keyword whose content --- is listed among ... args (which have to be strings). ----------------------------------------------------------------------- -function lexer:check (...) - local words = {...} - local a = self:next() - local function err () - error ("Got " .. tostring (a) .. - ", expected one of these keywords : '" .. - _G.table.concat (words,"', '") .. "'") end - - if not a or a.tag ~= "Keyword" then err () end - if #words == 0 then return a[1] end - for _, w in ipairs (words) do - if w == a[1] then return w end - end - err () -end - ----------------------------------------------------------------------- --- ----------------------------------------------------------------------- -function lexer:clone() - local clone = { - alpha = table.deep_copy(self.alpha), - sym = table.deep_copy(self.sym) } - setmetatable(clone, self) - clone.__index = clone - return clone -end diff --git a/src/compiler/metalua.mlua b/src/compiler/metalua.mlua deleted file mode 100644 index c25b981..0000000 --- a/src/compiler/metalua.mlua +++ /dev/null @@ -1,258 +0,0 @@ ---*-lua-*- Set as a metalua file because it requires some metalua libs - ---require 'verbose_require' - -require 'metalua.compiler' -require 'metalua.clopts' -require 'metalua.mlc_xcall' - -AST_COMPILE_ERROR_NUMBER = -1 -RUNTIME_ERROR_NUMBER = -3 -BYTECODE_SYNTHESE_ERROR_NUMBER = -100 - --{ extension 'match' } - -local chunks = { } -local runargs = { } - -local acc_chunk = |kind| |arg| table.insert (chunks, { tag=kind, arg }) - -parser = clopts { - -- Chunk loading - { short = 'f', long = 
'file', type = 'string', action = acc_chunk 'File', - usage = 'load a file to compile and/or run' - }, - { short = 'l', long = 'library', type = 'string', action = acc_chunk 'Library', - usage = 'load a libary from the standard paths' - }, - { short = 'e', long = 'literal', type = 'string', action = acc_chunk 'Literal', - usage = 'load a literal piece of source code' - }, - -- What to do with chunks - { short = 'o', long = 'output', type = 'string', - usage = 'set the target name of the next compiled file' - }, - { short = 'x', long = 'run', type = 'boolean', - usage = 'execute the compiled file instead of saving it (unless -o is also used)' - }, - { short = 'i', long = 'interactive', type = 'boolean', - usage = 'run an interactive loop after having run other files' - }, - -- Advanced stuff - { short = 'v', long = 'verbose', type = 'boolean', - usage = 'verbose mode' - }, - { short = 'a', long = 'print-ast', type = 'boolean', - usage = 'print the AST resulting from file compilation' - }, - { short = 'A', long = 'print-ast-lineinfo', type = 'boolean', - usage = 'print the AST resulting from file compilation, including lineinfo data' - }, - { short = 'S', long = 'print-src', type = 'boolean', - usage = 'print the AST resulting from file compilation, as re-gerenerated sources' - }, - { short = 'b', long = 'metabugs', type = 'boolean', - usage = 'show syntax errors as compile-time execution errors' - }, - { short = 's', long = 'sharpbang', type = 'string', - usage = 'set a first line to add to compiled file, typically "#!/bin/env mlr"' - }, - { long = 'no-runtime', type = 'boolean', - usage = "prevent the automatic requirement of metalua runtime" - }, - { long = '', short = 'p', type = '*', - action= function (newargs) runargs=table.icat(runargs, newargs) end, - usage = "pass all remaining arguments to the program" - }, -usage=[[ - -Compile and/or execute metalua programs. 
Parameters passed to the -compiler should be prefixed with an option flag, hinting what must be -done with them: take tham as file names to compile, as library names -to load, as parameters passed to the running program... When option -flags are absent, metalua tries to adopt a "Do What I Mean" approach: - -- if no code (no library, no literal expression and no file) is - specified, the first flag-less parameter is taken as a file name to - load. - -- if no code and no parameter is passed, an interactive loop is - started. - -- if a target file is specified with --output, the program is not - executed by default, unless a --run flag forces it to. Conversely, - if no --output target is specified, the code is run unless ++run - forbids it. -]]} - -local function main (...) - - local cfg = parser(...) - - ------------------------------------------------------------------- - -- Print messages if in verbose mode - ------------------------------------------------------------------- - local function verb_print (fmt, ...) - if cfg.verbose then - return printf ("[ "..fmt.." ]", ...) - end - end - - if cfg.verbose then - verb_print("raw options: %s", table.tostring(cfg)) - end - - ------------------------------------------------------------------- - -- If there's no chunk but there are params, interpret the first - -- param as a file name. - if #chunks==0 and cfg.params then - local the_file = table.remove(cfg.params, 1) - verb_print("Param %q considered as a source file", the_file) - chunks = { `File{ the_file } } - end - - ------------------------------------------------------------------- - -- If nothing to do, run REPL loop - if #chunks==0 and cfg.interactive==nil then - verb_print "Nothing to compile nor run, force interactive loop" - cfg.interactive=true - end - - - ------------------------------------------------------------------- - -- Run if asked to, or if no --output has been given - -- if cfg.run==false it's been *forced* to false, don't override. 
- if cfg.run==nil and not cfg.output then - verb_print("No output file specified; I'll run the program") - cfg.run = true - end - - local code = { } - - ------------------------------------------------------------------- - -- Get ASTs from sources - mlc.metabugs = cfg.metabugs - local last_file - for x in values(chunks) do - verb_print("Compiling %s", table.tostring(x)) - local st, ast - match x with - | `Library{ l } -> st, ast = true, `Call{ `Id 'require', `String{ l } } - | `Literal{ e } -> st, ast = mlc_xcall.client_literal (e) - | `File{ f } -> - st, ast = mlc_xcall.client_file (f) - -- Isolate each file in a separate fenv - if st then - ast = +{ function (...) -{ast} end (...) } - ast.source = '@'..f -- TODO [EVE] - code.source = '@'..f -- TODO [EVE] - last_file = ast - end - end - if not st then - printf ("Cannot compile %s:\n%s", table.tostring(x), ast or "no msg") - os.exit (AST_COMPILE_ERROR_NUMBER) - end - ast.origin = x - table.insert(code, ast) - end - -- The last file returns the whole chunk's result - if last_file then - local c = table.shallow_copy(last_file) - last_file <- `Return{ source = c.source, c } - end - - ------------------------------------------------------------------- - -- AST printing - if cfg['print-ast'] or cfg['print-ast-lineinfo'] then - verb_print "Resulting AST:" - for x in ivalues(code) do - printf("--- AST From %s: ---", table.tostring(x.source, 'nohash')) - if x.origin and x.origin.tag=='File' then x=x[1][1][2][1] end - if cfg['print-ast-lineinfo'] then table.print(x, 80, "indent1") - else table.print(x, 80, 'nohash') end - end - end - - ------------------------------------------------------------------- - -- Source printing - if cfg['print-src'] then - verb_print "Resulting sources:" - require 'metalua.ast_to_string' - for x in ivalues(code) do - printf("--- Source From %s: ---", table.tostring(x.source, 'nohash')) - if x.origin and x.origin.tag=='File' then x=x[1][1][2][1] end - print (ast_to_string (x)) - end - end - - -- 
FIXME: canonize/check AST - - ------------------------------------------------------------------- - -- Insert runtime loader - if cfg['no-runtime'] then - verb_print "Prevent insertion of command \"require 'metalua.runtime'\"" - else - table.insert(code, 1, +{require'metalua.runtime'}) - end - - local bytecode = mlc.luacstring_of_ast (code) - code = nil - - ------------------------------------------------------------------- - -- Insert #!... command - if cfg.sharpbang then - local shbang = cfg.sharpbang - verb_print ("Adding sharp-bang directive %q", shbang) - if not shbang :strmatch'^#!' then shbang = '#!' .. shbang end - if not shbang :strmatch'\n$' then shbang = shbang .. '\n' end - bytecode = shbang .. bytecode - end - - ------------------------------------------------------------------- - -- Save to file - if cfg.output then - -- FIXME: handle '-' - verb_print ("Saving to file %q", cfg.output) - local file, err_msg = io.open(cfg.output, 'wb') - if not file then error("can't open output file: "..err_msg) end - file:write(bytecode) - file:close() - if cfg.sharpbang and os.getenv "OS" ~= "Windows_NT" then - pcall(os.execute, 'chmod a+x "'..cfg.output..'"') - end - end - - ------------------------------------------------------------------- - -- Run compiled code - if cfg.run then - verb_print "Running" - local f = mlc.function_of_luacstring (bytecode) - bytecode = nil - -- FIXME: isolate execution in a ring - -- FIXME: check for failures - - runargs = table.icat(cfg.params or { }, runargs) - local function print_traceback (errmsg) - return errmsg .. '\n' .. debug.traceback ('',2) .. 
'\n' - end - local st, msg = xpcall(|| f(unpack (runargs)), print_traceback) - if not st then - io.stderr:write(msg) - os.exit(RUNTIME_ERROR_NUMBER) - end - end - - ------------------------------------------------------------------- - -- Run REPL loop - if cfg.interactive then - verb_print "Starting REPL loop" - require 'metalua.metaloop' - metaloop.run() - end - - verb_print "Done" - -end - -main(...) diff --git a/src/compiler/mlc.mlua b/src/compiler/mlc.mlua deleted file mode 100644 index e6e7f5d..0000000 --- a/src/compiler/mlc.mlua +++ /dev/null @@ -1,195 +0,0 @@ ---*-lua-*----------------------------------------------------------------------- --- This module is written in a more hackish way than necessary, just --- because I can. Its core feature is to dynamically generate a --- function that converts from a source format to a destination --- format; these formats are the various ways to represent a piece of --- program, from the source file to the executable function. Legal --- formats are: --- --- * luafile: the name of a file containing sources. --- * luastring: these sources as a single string. --- * lexstream: a stream of lexemes. --- * ast: an abstract syntax tree. --- * proto: a (Yueliang) struture containing a high level --- representation of bytecode. Largely based on the --- Proto structure in Lua's VM. --- * luacstring: a string dump of the function, as taken by --- loadstring() and produced by string.dump(). --- * function: an executable lua function in RAM. --- --------------------------------------------------------------------------------- - -require 'metalua.bytecode' -require 'metalua.mlp' - -mlc = { } -setmetatable(mlc, mlc) -mlc.metabugs = false - --------------------------------------------------------------------------------- --- Order of the transformations. if 'a' is on the left of 'b', then a 'a' can --- be transformed into a 'b' (but not the other way around). 
Since the table --- is transposed, the test is 'if mlc.order.a > mlc.order.b then error(...) end' --------------------------------------------------------------------------------- -mlc.order = table.transpose{ - 'luafile', 'luastring', 'lexstream', 'ast', 'proto', - 'luacstring', 'function' } - --------------------------------------------------------------------------------- --- The macro 'POINT(point_name, expected_type)' creates an entry point in the --- 'mlc.convert' function. When we convert a 'a' into a 'b', FIXME --------------------------------------------------------------------------------- --{ block: - jump_to_point = `If{ } - function point_builder(args) - local name, point_type, code = unpack(args) - table.insert(jump_to_point, +{src_fmt == -{name}}) -- if source format is 'name' - table.insert(jump_to_point, { `Goto{name} }) -- then jump to label 'name' - return { - --------------------------------------------------- - -- Stop if this is the destination format - --------------------------------------------------- - +{stat: if dst_fmt == -{name} then return x end }, - --------------------------------------------------- - -- Start here if the source format matches - --------------------------------------------------- - `Label{ name }, - -- +{print(" *** point "..-{name})}, -- debug trace - --------------------------------------------------- - -- Check that the type matches - --------------------------------------------------- - +{stat: assert (-{point_type} == type(x), "Invalid source type") }, - -- perform transformation operations to the next type - } - end - mlp.lexer:add 'POINT' - mlp.stat:add{ 'POINT', mlp.string, ',', mlp.string, builder = point_builder } -} -- end of meta-block - -function mlc.convert (x, src_fmt, dst_fmt, name) - -- printf(" *** Convert a %s into a %s", src_fmt, dst_fmt) - - -{ jump_to_point } - - error "Can't perform this conversion (bad src name)" - - POINT 'luafile', 'string' -- x is the src file's name - - if not name then 
name = '@'..x end - local f, msg = io.open(x, "rb") - if not f then error("While trying to open file '"..x.."': "..msg) end - x = f:read'*a' - f:close() - - POINT 'luastring', 'string' -- x is the source - - x = mlp.lexer:newstream(x, name) - - POINT 'lexstream', 'table' -- x is the lexeme stream - - local status -- status = compilation success - local lx=x - if mlc.metabugs - -- If metabugs is true, errors should be attributed to a parser bug. - then status, x = true, mlp.chunk (lx) - -- If metabugs is false, errors should be attributed to an invalid entry. - else status, x = pcall (mlp.chunk, lx) end - -- FIXME: this test seems wrong ??? Or is it the message? - if status and lx:peek().tag ~= "Eof" - then status, x = false, "Premature Eof" - elseif status and lx:peek().tag == "End" - then status, x = false, "Unexpected 'end' keyword" end - if not status and x then - -- x = error msg; get rid of ??? - x = x:strmatch "[^:]+:[0-9]+: (.*)" or x - local li = lx:lineinfo_left() - error (string.format ( - "Parsing error in %s line %s, column %i, char %s: \n%s", - name or "", li[1], li[2], li[3], x), 2) - return nil - end - - if x then x.source = name end -- TODO [EVE] store debug info in the special part of ast - - POINT 'ast', 'table' -- x is the AST - x = bytecode.metalua_compile(x, name or x.source) - POINT 'proto', 'table' - x = bytecode.dump_string (x) - POINT 'luacstring', 'string' -- normally x is a bytecode dump - x = string.undump(x, name) - POINT 'function', 'function' - error "Can't perform this conversion (bad dst name)" -end - --------------------------------------------------------------------------------- --- Dynamically compose a conversion function from a function name --- xxx_of_yyy() or yyy_to_xxx(). 
--------------------------------------------------------------------------------- -function mlc.__index(_, name) - local dst, src = name:strmatch '^([a-z]+)_of_([a-z]+)$' - if not dst then src, dst = name:strmatch '^([a-z]+)_to_([a-z]+)$' end - if not (src and dst) then return nil end -- not a converter - local osrc, odst = mlc.order[src], mlc.order[dst] -- check existence of formats - if not osrc then error ("unknown source format "..src) end - if not odst then error ("unknown destination format "..src) end - if osrc > odst then error "Can't convert in this direction" end - return |x, name| mlc.convert(x, src, dst, name) -end - --------------------------------------------------------------------------------- --- This case isn't handled by the __index method, as it goes "in the wrong direction" --------------------------------------------------------------------------------- -mlc.function_to_luacstring = string.dump -mlc.luacstring_of_function = string.dump - --------------------------------------------------------------------------------- --- These are drop-in replacement for loadfile() and loadstring(). The --- C functions will call them instead of the original versions if --- they're referenced in the registry. 
--------------------------------------------------------------------------------- - -lua_loadstring = loadstring -local lua_loadstring = loadstring -lua_loadfile = loadfile -local lua_loadfile = loadfile - -function loadstring(str, name) - if type(str) ~= 'string' then error 'string expected' end - if str:match '^\027LuaQ' then return lua_loadstring(str) end - local n = str:match '^#![^\n]*\n()' - if n then str=str:sub(n, -1) end - -- FIXME: handle erroneous returns (return nil + error msg) - local success, f = pcall (mlc.function_of_luastring, str, name) - if success then return f else return nil, f end -end - -function loadfile(filename) - local f, err_msg = io.open(filename, 'rb') - if not f then return nil, err_msg end - local success, src = pcall( f.read, f, '*a') - pcall(f.close, f) - if success then return loadstring (src, '@'..filename) - else return nil, src end -end - -function load(f, name) - while true do - local x = f() - if not x then break end - assert(type(x)=='string', "function passed to load() must return strings") - table.insert(acc, x) - end - return loadstring(table.concat(x)) -end - -function dostring(src) - local f, msg = loadstring(src) - if not f then error(msg) end - return f() -end - -function dofile(name) - local f, msg = loadfile(name) - if not f then error(msg) end - return f() -end diff --git a/src/compiler/mlp_expr.lua b/src/compiler/mlp_expr.lua deleted file mode 100644 index 7e98a30..0000000 --- a/src/compiler/mlp_expr.lua +++ /dev/null @@ -1,213 +0,0 @@ ----------------------------------------------------------------------- --- Metalua: $Id: mlp_expr.lua,v 1.7 2006/11/15 09:07:50 fab13n Exp $ --- --- Summary: metalua parser, expression parser. This is part of the --- definition of module [mlp]. --- ----------------------------------------------------------------------- --- --- Copyright (c) 2006, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. 
--- ----------------------------------------------------------------------- --- History: --- $Log: mlp_expr.lua,v $ --- Revision 1.7 2006/11/15 09:07:50 fab13n --- debugged meta operators. --- Added command line options handling. --- --- Revision 1.6 2006/11/10 02:11:17 fab13n --- compiler faithfulness to 5.1 improved --- gg.expr extended --- mlp.expr refactored --- --- Revision 1.5 2006/11/09 09:39:57 fab13n --- some cleanup --- --- Revision 1.4 2006/11/07 21:29:02 fab13n --- improved quasi-quoting --- --- Revision 1.3 2006/11/07 04:38:00 fab13n --- first bootstrapping version. --- --- Revision 1.2 2006/11/05 15:08:34 fab13n --- updated code generation, to be compliant with 5.1 --- ----------------------------------------------------------------------- - --------------------------------------------------------------------------------- --- --- Exported API: --- * [mlp.expr()] --- * [mlp.expr_list()] --- * [mlp.func_val()] --- --------------------------------------------------------------------------------- - ---require "gg" ---require "mlp_misc" ---require "mlp_table" ---require "mlp_meta" - --------------------------------------------------------------------------------- --- These function wrappers (eta-expansions ctually) are just here to break --- some circular dependencies between mlp_xxx.lua files. --------------------------------------------------------------------------------- -local function _expr (lx) return mlp.expr (lx) end -local function _table_content (lx) return mlp.table_content (lx) end -local function block (lx) return mlp.block (lx) end -local function stat (lx) return mlp.stat (lx) end - -module ("mlp", package.seeall) - --------------------------------------------------------------------------------- --- Non-empty expression list. Actually, this isn't used here, but that's --- handy to give to users. 
--------------------------------------------------------------------------------- -expr_list = gg.list{ _expr, separators = "," } - --------------------------------------------------------------------------------- --- Helpers for function applications / method applications --------------------------------------------------------------------------------- -func_args_content = gg.list { - name = "function arguments", - _expr, separators = ",", terminators = ")" } - --- Used to parse methods -method_args = gg.multisequence{ - name = "function argument(s)", - { "{", table_content, "}" }, - { "(", func_args_content, ")", builder = fget(1) }, - { "+{", quote_content, "}" }, - function(lx) local r = opt_string(lx); return r and {r} or { } end } - --------------------------------------------------------------------------------- --- [func_val] parses a function, from opening parameters parenthese to --- "end" keyword included. Used for anonymous functions as well as --- function declaration statements (both local and global). --- --- It's wrapped in a [_func_val] eta expansion, so that when expr --- parser uses the latter, they will notice updates of [func_val] --- definitions. 
--------------------------------------------------------------------------------- -func_params_content = gg.list{ name="function parameters", - gg.multisequence{ { "...", builder = "Dots" }, id }, - separators = ",", terminators = {")", "|"} } - -local _func_params_content = function (lx) return func_params_content(lx) end - -func_val = gg.sequence { name="function body", - "(", func_params_content, ")", block, "end", builder = "Function" } - -local _func_val = function (lx) return func_val(lx) end - --------------------------------------------------------------------------------- --- Default parser for primary expressions --------------------------------------------------------------------------------- -function id_or_literal (lx) - local a = lx:next() - if a.tag~="Id" and a.tag~="String" and a.tag~="Number" then - local msg - if a.tag=='Eof' then - msg = "End of file reached when an expression was expected" - elseif a.tag=='Keyword' then - msg = "An expression was expected, and `"..a[1].. - "' can't start an expression" - else - msg = "Unexpected expr token " .. _G.table.tostring (a, 'nohash') - end - gg.parse_error (lx, msg) - end - return a -end - - --------------------------------------------------------------------------------- --- Builder generator for operators. 
Wouldn't be worth it if "|x|" notation --- were allowed, but then lua 5.1 wouldn't compile it --------------------------------------------------------------------------------- - --- opf1 = |op| |_,a| `Op{ op, a } -local function opf1 (op) return - function (_,a) return { tag="Op", op, a } end end - --- opf2 = |op| |a,_,b| `Op{ op, a, b } -local function opf2 (op) return - function (a,_,b) return { tag="Op", op, a, b } end end - --- opf2r = |op| |a,_,b| `Op{ op, b, a } -- (args reversed) -local function opf2r (op) return - function (a,_,b) return { tag="Op", op, b, a } end end - -local function op_ne(a, _, b) - -- The first version guarantees to return the same code as Lua, - -- but it relies on the non-standard 'ne' operator, which has been - -- suppressed from the official AST grammar (although still supported - -- in practice by the compiler). - -- return { tag="Op", "ne", a, b } - return { tag="Op", "not", { tag="Op", "eq", a, b, lineinfo= { - first = a.lineinfo.first, last = b.lineinfo.last } } } -end - - --------------------------------------------------------------------------------- --- --- complete expression --- --------------------------------------------------------------------------------- - --- FIXME: set line number. 
In [expr] transformers probably - -expr = gg.expr { name = "expression", - - primary = gg.multisequence{ name="expr primary", - { "(", _expr, ")", builder = "Paren" }, - { "function", _func_val, builder = fget(1) }, - { "-{", splice_content, "}", builder = fget(1) }, - { "+{", quote_content, "}", builder = fget(1) }, - { "nil", builder = "Nil" }, - { "true", builder = "True" }, - { "false", builder = "False" }, - { "...", builder = "Dots" }, - table, - id_or_literal }, - - infix = { name="expr infix op", - { "+", prec = 60, builder = opf2 "add" }, - { "-", prec = 60, builder = opf2 "sub" }, - { "*", prec = 70, builder = opf2 "mul" }, - { "/", prec = 70, builder = opf2 "div" }, - { "%", prec = 70, builder = opf2 "mod" }, - { "^", prec = 90, builder = opf2 "pow", assoc = "right" }, - { "..", prec = 40, builder = opf2 "concat", assoc = "right" }, - { "==", prec = 30, builder = opf2 "eq" }, - { "~=", prec = 30, builder = op_ne }, - { "<", prec = 30, builder = opf2 "lt" }, - { "<=", prec = 30, builder = opf2 "le" }, - { ">", prec = 30, builder = opf2r "lt" }, - { ">=", prec = 30, builder = opf2r "le" }, - { "and",prec = 20, builder = opf2 "and" }, - { "or", prec = 10, builder = opf2 "or" } }, - - prefix = { name="expr prefix op", - { "not", prec = 80, builder = opf1 "not" }, - { "#", prec = 80, builder = opf1 "len" }, - { "-", prec = 80, builder = opf1 "unm" } }, - - suffix = { name="expr suffix op", - { "[", _expr, "]", builder = function (tab, idx) - return {tag="Index", tab, idx[1]} end}, - { ".", id, builder = function (tab, field) - return {tag="Index", tab, id2string(field[1])} end }, - { "(", func_args_content, ")", builder = function(f, args) - return {tag="Call", f, unpack(args[1])} end }, - { "{", _table_content, "}", builder = function (f, arg) - return {tag="Call", f, arg[1]} end}, - { ":", id, method_args, builder = function (obj, post) - return {tag="Invoke", obj, id2string(post[1]), unpack(post[2])} end}, - { "+{", quote_content, "}", builder = function 
(f, arg) - return {tag="Call", f, arg[1] } end }, - default = { name="opt_string_arg", parse = mlp.opt_string, builder = function(f, arg) - return {tag="Call", f, arg } end } } } diff --git a/src/compiler/mlp_ext.lua b/src/compiler/mlp_ext.lua deleted file mode 100644 index af97803..0000000 --- a/src/compiler/mlp_ext.lua +++ /dev/null @@ -1,89 +0,0 @@ --------------------------------------------------------------------------------- --- --- Non-Lua syntax extensions --- --------------------------------------------------------------------------------- - -module ("mlp", package.seeall) - --------------------------------------------------------------------------------- --- Alebraic Datatypes --------------------------------------------------------------------------------- -local function adt (lx) - local tagval = id (lx) [1] - local tagkey = {tag="Pair", {tag="String", "tag"}, {tag="String", tagval} } - if lx:peek().tag == "String" or lx:peek().tag == "Number" then - return { tag="Table", tagkey, lx:next() } - elseif lx:is_keyword (lx:peek(), "{") then - local x = table (lx) - _G.table.insert (x, 1, tagkey) - return x - else return { tag="Table", tagkey } end -end - -expr:add{ "`", adt, builder = fget(1) } - --------------------------------------------------------------------------------- --- Anonymous lambda --------------------------------------------------------------------------------- -local lambda_expr = gg.sequence{ - "|", func_params_content, "|", expr, - builder= function (x) - local li = x[2].lineinfo - return { tag="Function", x[1], - { {tag="Return", x[2], lineinfo=li }, lineinfo=li } } - end } - --- In an earlier version, lambda_expr took an expr_list rather than an expr --- after the 2nd bar. However, it happened to be much more of a burden than an --- help, So finally I disabled it. If you want to return several results, --- use the long syntax. 
--------------------------------------------------------------------------------- --- local lambda_expr = gg.sequence{ --- "|", func_params_content, "|", expr_list, --- builder= function (x) --- return {tag="Function", x[1], { {tag="Return", unpack(x[2]) } } } end } - -expr:add (lambda_expr) - --------------------------------------------------------------------------------- --- Allows to write "a `f` b" instead of "f(a, b)". Taken from Haskell. --- This is not part of Lua 5.1 syntax, so it's added to the expression --- afterwards, so that it's easier to disable. --------------------------------------------------------------------------------- -local function expr_in_backquotes (lx) return expr(lx, 35) end - -expr.infix:add{ name = "infix function", - "`", expr_in_backquotes, "`", prec = 35, assoc="left", - builder = function(a, op, b) return {tag="Call", op[1], a, b} end } - - --------------------------------------------------------------------------------- --- table.override assignment --------------------------------------------------------------------------------- - -mlp.lexer:add "<-" -stat.assignments["<-"] = function (a, b) - assert( #a==1 and #b==1, "No multi-args for '<-'") - return { tag="Call", { tag="Index", { tag="Id", "table" }, - { tag="String", "override" } }, - a[1], b[1]} -end - --------------------------------------------------------------------------------- --- C-style op+assignments --------------------------------------------------------------------------------- -local function op_assign(kw, op) - local function rhs(a, b) - return { tag="Op", op, a, b } - end - local function f(a,b) - return { tag="Set", a, _G.table.imap(rhs, a, b) } - end - mlp.lexer:add (kw) - mlp.stat.assignments[kw] = f -end - -_G.table.iforeach (op_assign, - {"+=", "-=", "*=", "/="}, - {"add", "sub", "mul", "div"}) \ No newline at end of file diff --git a/src/compiler/mlp_lexer.lua b/src/compiler/mlp_lexer.lua deleted file mode 100644 index be290f1..0000000 --- 
a/src/compiler/mlp_lexer.lua +++ /dev/null @@ -1,32 +0,0 @@ ----------------------------------------------------------------------- --- Metalua: $Id: mll.lua,v 1.3 2006/11/15 09:07:50 fab13n Exp $ --- --- Summary: Source file lexer. ~~Currently only works on strings. --- Some API refactoring is needed. --- ----------------------------------------------------------------------- --- --- Copyright (c) 2006-2007, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. --- ----------------------------------------------------------------------- - -module ("mlp", package.seeall) - -require "lexer" - -local mlp_lexer = lexer.lexer:clone() - -local keywords = { - "and", "break", "do", "else", "elseif", - "end", "false", "for", "function", "if", - "in", "local", "nil", "not", "or", "repeat", - "return", "then", "true", "until", "while", - "...", "..", "==", ">=", "<=", "~=", - "+{", "-{" } - -for w in values(keywords) do mlp_lexer:add(w) end - -_M.lexer = mlp_lexer diff --git a/src/compiler/mlp_meta.lua b/src/compiler/mlp_meta.lua deleted file mode 100644 index 27d476a..0000000 --- a/src/compiler/mlp_meta.lua +++ /dev/null @@ -1,118 +0,0 @@ ----------------------------------------------------------------------- --- Metalua: $Id: mlp_meta.lua,v 1.4 2006/11/15 09:07:50 fab13n Exp $ --- --- Summary: Meta-operations: AST quasi-quoting and splicing --- ----------------------------------------------------------------------- --- --- Copyright (c) 2006, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. 
--- ----------------------------------------------------------------------- - - --------------------------------------------------------------------------------- --- --- Exported API: --- * [mlp.splice_content()] --- * [mlp.quote_content()] --- --------------------------------------------------------------------------------- - -module ("mlp", package.seeall) - --------------------------------------------------------------------------------- --- External splicing: compile an AST into a chunk, load and evaluate --- that chunk, and replace the chunk by its result (which must also be --- an AST). --------------------------------------------------------------------------------- - -function splice (ast) - local f = mlc.function_of_ast(ast, '=splice') - local result=f() - return result -end - --------------------------------------------------------------------------------- --- Going from an AST to an AST representing that AST --- the only key being lifted in this version is ["tag"] --------------------------------------------------------------------------------- -function quote (t) - --print("QUOTING:", _G.table.tostring(t, 60)) - local cases = { } - function cases.table (t) - local mt = { tag = "Table" } - --_G.table.insert (mt, { tag = "Pair", quote "quote", { tag = "True" } }) - if t.tag == "Splice" then - assert (#t==1, "Invalid splice") - local sp = t[1] - return sp - elseif t.tag then - _G.table.insert (mt, { tag = "Pair", quote "tag", quote (t.tag) }) - end - for _, v in ipairs (t) do - _G.table.insert (mt, quote(v)) - end - return mt - end - function cases.number (t) return { tag = "Number", t, quote = true } end - function cases.string (t) return { tag = "String", t, quote = true } end - return cases [ type (t) ] (t) -end - --------------------------------------------------------------------------------- --- when this variable is false, code inside [-{...}] is compiled and --- avaluated immediately. 
When it's true (supposedly when we're --- parsing data inside a quasiquote), [-{foo}] is replaced by --- [`Splice{foo}], which will be unpacked by [quote()]. --------------------------------------------------------------------------------- -in_a_quote = false - --------------------------------------------------------------------------------- --- Parse the inside of a "-{ ... }" --------------------------------------------------------------------------------- -function splice_content (lx) - local parser_name = "expr" - if lx:is_keyword (lx:peek(2), ":") then - local a = lx:next() - lx:next() -- skip ":" - assert (a.tag=="Id", "Invalid splice parser name") - parser_name = a[1] - end - local ast = mlp[parser_name](lx) - if in_a_quote then - --printf("SPLICE_IN_QUOTE:\n%s", _G.table.tostring(ast, "nohash", 60)) - return { tag="Splice", ast } - else - if parser_name == "expr" then ast = { { tag="Return", ast } } - elseif parser_name == "stat" then ast = { ast } - elseif parser_name ~= "block" then - error ("splice content must be an expr, stat or block") end - --printf("EXEC THIS SPLICE:\n%s", _G.table.tostring(ast, "nohash", 60)) - return splice (ast) - end -end - --------------------------------------------------------------------------------- --- Parse the inside of a "+{ ... 
}" --------------------------------------------------------------------------------- -function quote_content (lx) - local parser - if lx:is_keyword (lx:peek(2), ":") then -- +{parser: content } - parser = mlp[id(lx)[1]] - lx:next() - else -- +{ content } - parser = mlp.expr - end - - local prev_iq = in_a_quote - in_a_quote = true - --print("IN_A_QUOTE") - local content = parser (lx) - local q_content = quote (content) - in_a_quote = prev_iq - return q_content -end - diff --git a/src/compiler/mlp_misc.lua b/src/compiler/mlp_misc.lua deleted file mode 100644 index bfdc376..0000000 --- a/src/compiler/mlp_misc.lua +++ /dev/null @@ -1,185 +0,0 @@ ----------------------------------------------------------------------- --- Metalua: $Id: mlp_misc.lua,v 1.6 2006/11/15 09:07:50 fab13n Exp $ --- --- Summary: metalua parser, miscellaneous utility functions. --- ----------------------------------------------------------------------- --- --- Copyright (c) 2006, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. --- ----------------------------------------------------------------------- --- History: --- $Log: mlp_misc.lua,v $ --- Revision 1.6 2006/11/15 09:07:50 fab13n --- debugged meta operators. --- Added command line options handling. --- --- Revision 1.5 2006/11/10 02:11:17 fab13n --- compiler faithfulness to 5.1 improved --- gg.expr extended --- mlp.expr refactored --- --- Revision 1.4 2006/11/09 09:39:57 fab13n --- some cleanup --- --- Revision 1.3 2006/11/07 04:38:00 fab13n --- first bootstrapping version. 
--- --- Revision 1.2 2006/11/05 15:08:34 fab13n --- updated code generation, to be compliant with 5.1 --- ----------------------------------------------------------------------- - --------------------------------------------------------------------------------- --- --- Exported API: --- * [mlp.fget()] --- * [mlp.id()] --- * [mlp.opt_id()] --- * [mlp.id_list()] --- * [mlp.gensym()] --- * [mlp.string()] --- * [mlp.opt_string()] --- * [mlp.id2string()] --- --------------------------------------------------------------------------------- - ---require "gg" ---require "mll" - -module ("mlp", package.seeall) - --------------------------------------------------------------------------------- --- returns a function that takes the [n]th element of a table. --- if [tag] is provided, then this element is expected to be a --- table, and this table receives a "tag" field whose value is --- set to [tag]. --- --- The primary purpose of this is to generate builders for --- grammar generators. It has little purpose in metalua, as lambda has --- a lightweight syntax. --------------------------------------------------------------------------------- - -function fget (n, tag) - assert (type (n) == "number") - if tag then - assert (type (tag) == "string") - return function (x) - assert (type (x[n]) == "table") - return {tag=tag, unpack(x[n])} end - else - return function (x) return x[n] end - end -end - - --------------------------------------------------------------------------------- --- Try to read an identifier (possibly as a splice), or return [false] if no --- id is found. 
--------------------------------------------------------------------------------- -function opt_id (lx) - local a = lx:peek(); - if lx:is_keyword (a, "-{") then - local v = gg.sequence{ "-{", splice_content, "}" } (lx) [1] - if v.tag ~= "Id" and v.tag ~= "Splice" then - gg.parse_error(lx,"Bad id splice") - end - return v - elseif a.tag == "Id" then return lx:next() - else return false end -end - --------------------------------------------------------------------------------- --- Mandatory reading of an id: causes an error if it can't read one. --------------------------------------------------------------------------------- -function id (lx) - return opt_id (lx) or gg.parse_error(lx,"Identifier expected") -end - --------------------------------------------------------------------------------- --- Common helper function --------------------------------------------------------------------------------- -id_list = gg.list { primary = mlp.id, separators = "," } - --------------------------------------------------------------------------------- --- Symbol generator: [gensym()] returns a guaranteed-to-be-unique identifier. --- The main purpose is to avoid variable capture in macros. --- --- If a string is passed as an argument, theis string will be part of the --- id name (helpful for macro debugging) --------------------------------------------------------------------------------- -local gensymidx = 0 - -function gensym (arg) - gensymidx = gensymidx + 1 - return { tag="Id", _G.string.format(".%i.%s", gensymidx, arg or "")} -end - --------------------------------------------------------------------------------- --- Converts an identifier into a string. Hopefully one day it'll handle --- splices gracefully, but that proves quite tricky. 
--------------------------------------------------------------------------------- -function id2string (id) - --print("id2string:", disp.ast(id)) - if id.tag == "Id" then id.tag = "String"; return id - elseif id.tag == "Splice" then - assert (in_a_quote, "can't do id2string on an outermost splice") - error ("id2string on splice not implemented") - -- Evaluating id[1] will produce `Id{ xxx }, - -- and we want it to produce `String{ xxx } - -- Morally, this is what I want: - -- return `String{ `Index{ `Splice{ id[1] }, `Number 1 } } - -- That is, without sugar: - return {tag="String", {tag="Index", {tag="Splice", id[1] }, - {tag="Number", 1 } } } - else error ("Identifier expected: ".._G.table.tostring(id, 'nohash')) end -end - --------------------------------------------------------------------------------- --- Read a string, possibly spliced, or return an error if it can't --------------------------------------------------------------------------------- -function string (lx) - local a = lx:peek() - if lx:is_keyword (a, "-{") then - local v = gg.sequence{ "-{", splice_content, "}" } (lx) [1] - if v.tag ~= "" and v.tag ~= "Splice" then - gg.parse_error(lx,"Bad string splice") - end - return v - elseif a.tag == "String" then return lx:next() - else error "String expected" end -end - --------------------------------------------------------------------------------- --- Try to read a string, or return false if it can't. No splice allowed. 
--------------------------------------------------------------------------------- -function opt_string (lx) - return lx:peek().tag == "String" and lx:next() -end - --------------------------------------------------------------------------------- --- Chunk reader: block + Eof --------------------------------------------------------------------------------- -function skip_initial_sharp_comment (lx) - -- Dirty hack: I'm happily fondling lexer's private parts - -- FIXME: redundant with lexer:newstream() - lx :sync() - local i = lx.src:match ("^#.-\n()", lx.i) - if i then lx.i, lx.column_offset, lx.line = i, i, lx.line+1 end -end - -local function _chunk (lx) - if lx:peek().tag == 'Eof' then return { } -- handle empty files - else - skip_initial_sharp_comment (lx) - local chunk = block (lx) - if lx:peek().tag ~= "Eof" then error "End-of-file expected" end - return chunk - end -end - --- chunk is wrapped in a sequence so that it has a "transformer" field. -chunk = gg.sequence { _chunk, builder = unpack } \ No newline at end of file diff --git a/src/compiler/mlp_stat.lua b/src/compiler/mlp_stat.lua deleted file mode 100644 index c6edad1..0000000 --- a/src/compiler/mlp_stat.lua +++ /dev/null @@ -1,226 +0,0 @@ ----------------------------------------------------------------------- --- Metalua: $Id: mlp_stat.lua,v 1.7 2006/11/15 09:07:50 fab13n Exp $ --- --- Summary: metalua parser, statement/block parser. This is part of --- the definition of module [mlp]. --- ----------------------------------------------------------------------- --- --- Copyright (c) 2006, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. 
--- ----------------------------------------------------------------------- --- ----------------------------------------------------------------------- - --------------------------------------------------------------------------------- --- --- Exports API: --- * [mlp.stat()] --- * [mlp.block()] --- * [mlp.for_header()] --- --------------------------------------------------------------------------------- - --------------------------------------------------------------------------------- --- eta-expansions to break circular dependency --------------------------------------------------------------------------------- -local expr = function (lx) return mlp.expr (lx) end -local func_val = function (lx) return mlp.func_val (lx) end -local expr_list = function (lx) return mlp.expr_list(lx) end - -module ("mlp", package.seeall) - --------------------------------------------------------------------------------- --- List of all keywords that indicate the end of a statement block. Users are --- likely to extend this list when designing extensions. --------------------------------------------------------------------------------- - - -local block_terminators = { "else", "elseif", "end", "until", ")", "}", "]" } - --- FIXME: this must be handled from within GG!!! 
-function block_terminators:add(x) - if type (x) == "table" then for _, y in ipairs(x) do self:add (y) end - else _G.table.insert (self, x) end -end - --------------------------------------------------------------------------------- --- list of statements, possibly followed by semicolons --------------------------------------------------------------------------------- -block = gg.list { - name = "statements block", - terminators = block_terminators, - primary = function (lx) - -- FIXME use gg.optkeyword() - local x = stat (lx) - if lx:is_keyword (lx:peek(), ";") then lx:next() end - return x - end } - --------------------------------------------------------------------------------- --- Helper function for "return " parsing. --- Called when parsing return statements. --- The specific test for initial ";" is because it's not a block terminator, --- so without itgg.list would choke on "return ;" statements. --- We don't make a modified copy of block_terminators because this list --- is sometimes modified at runtime, and the return parser would get out of --- sync if it was relying on a copy. --------------------------------------------------------------------------------- -local return_expr_list_parser = gg.multisequence{ - { ";" , builder = function() return { } end }, - default = gg.list { - expr, separators = ",", terminators = block_terminators } } - --------------------------------------------------------------------------------- --- for header, between [for] and [do] (exclusive). --- Return the `Forxxx{...} AST, without the body element (the last one). 
--------------------------------------------------------------------------------- -function for_header (lx) - local var = mlp.id (lx) - if lx:is_keyword (lx:peek(), "=") then - -- Fornum: only 1 variable - lx:next() -- skip "=" - local e = expr_list (lx) - assert (2 <= #e and #e <= 3, "2 or 3 values in a fornum") - return { tag="Fornum", var, unpack (e) } - else - -- Forin: there might be several vars - local a = lx:is_keyword (lx:next(), ",", "in") - if a=="in" then var_list = { var, lineinfo = var.lineinfo } else - -- several vars; first "," skipped, read other vars - var_list = gg.list{ - primary = id, separators = ",", terminators = "in" } (lx) - _G.table.insert (var_list, 1, var) -- put back the first variable - lx:next() -- skip "in" - end - local e = expr_list (lx) - return { tag="Forin", var_list, e } - end -end - --------------------------------------------------------------------------------- --- Function def parser helper: id ( . id ) * --------------------------------------------------------------------------------- -local function fn_builder (list) - local r = list[1] - for i = 2, #list do r = { tag="Index", r, id2string(list[i]) } end - return r -end -local func_name = gg.list{ id, separators = ".", builder = fn_builder } - --------------------------------------------------------------------------------- --- Function def parser helper: ( : id )? 
--------------------------------------------------------------------------------- -local method_name = gg.onkeyword{ name = "method invocation", ":", id, - transformers = { function(x) return x and id2string(x) end } } - --------------------------------------------------------------------------------- --- Function def builder --------------------------------------------------------------------------------- -local function funcdef_builder(x) - local name, method, func = x[1], x[2], x[3] - if method then - name = { tag="Index", name, method, lineinfo = { - first = name.lineinfo.first, - last = method.lineinfo.last } } - _G.table.insert (func[1], 1, {tag="Id", "self"}) - end - local r = { tag="Set", {name}, {func} } - r[1].lineinfo = name.lineinfo - r[2].lineinfo = func.lineinfo - return r -end - - --------------------------------------------------------------------------------- --- if statement builder --------------------------------------------------------------------------------- -local function if_builder (x) - local cb_pairs, else_block, r = x[1], x[2], {tag="If"} - for i=1,#cb_pairs do r[2*i-1]=cb_pairs[i][1]; r[2*i]=cb_pairs[i][2] end - if else_block then r[#r+1] = else_block end - return r -end - --------------------------------------------------------------------------------- --- produce a list of (expr,block) pairs --------------------------------------------------------------------------------- -local elseifs_parser = gg.list { - gg.sequence { expr, "then", block }, - separators = "elseif", - terminators = { "else", "end" } } - --------------------------------------------------------------------------------- --- assignments and calls: statements that don't start with a keyword --------------------------------------------------------------------------------- -local function assign_or_call_stat_parser (lx) - local e = expr_list (lx) - local a = lx:is_keyword(lx:peek()) - local op = a and stat.assignments[a] - if op then - --FIXME: check that [e] is a LHS - 
lx:next() - local v = expr_list (lx) - if type(op)=="string" then return { tag=op, e, v } - else return op (e, v) end - else - assert (#e > 0) - if #e > 1 then - gg.parse_error (lx, - "comma is not a valid statement separator; statement can be ".. - "separated by semicolons, or not separated at all") end - if e[1].tag ~= "Call" and e[1].tag ~= "Invoke" then - local typename - if e[1].tag == 'Id' then - typename = '("'..e[1][1]..'") is an identifier' - elseif e[1].tag == 'Op' then - typename = "is an arithmetic operation" - else typename = "is of type '"..(e[1].tag or "").."'" end - - gg.parse_error (lx, "This expression " .. typename .. - "; a statement was expected, and only function and method call ".. - "expressions can be used as statements"); - end - return e[1] - end -end - -local_stat_parser = gg.multisequence{ - -- local function - { "function", id, func_val, builder = - function(x) - local vars = { x[1], lineinfo = x[1].lineinfo } - local vals = { x[2], lineinfo = x[2].lineinfo } - return { tag="Localrec", vars, vals } - end }, - -- local ( = )? 
- default = gg.sequence{ id_list, gg.onkeyword{ "=", expr_list }, - builder = function(x) return {tag="Local", x[1], x[2] or { } } end } } - --------------------------------------------------------------------------------- --- statement --------------------------------------------------------------------------------- -stat = gg.multisequence { - name="statement", - { "do", block, "end", builder = - function (x) return { tag="Do", unpack (x[1]) } end }, - { "for", for_header, "do", block, "end", builder = - function (x) x[1][#x[1]+1] = x[2]; return x[1] end }, - { "function", func_name, method_name, func_val, builder=funcdef_builder }, - { "while", expr, "do", block, "end", builder = "While" }, - { "repeat", block, "until", expr, builder = "Repeat" }, - { "local", local_stat_parser, builder = fget (1) }, - { "return", return_expr_list_parser, builder = fget (1, "Return") }, - { "break", builder = function() return { tag="Break" } end }, - { "-{", splice_content, "}", builder = fget(1) }, - { "if", elseifs_parser, gg.onkeyword{ "else", block }, "end", - builder = if_builder }, - default = assign_or_call_stat_parser } - -stat.assignments = { - ["="] = "Set" } - -function stat.assignments:add(k, v) self[k] = v end diff --git a/src/compiler/mlp_table.lua b/src/compiler/mlp_table.lua deleted file mode 100644 index dbaa784..0000000 --- a/src/compiler/mlp_table.lua +++ /dev/null @@ -1,92 +0,0 @@ ----------------------------------------------------------------------- --- Metalua: $Id: mlp_table.lua,v 1.5 2006/11/10 02:11:17 fab13n Exp $ --- --- Summary: metalua parser, table constructor parser. This is part --- of thedefinition of module [mlp]. --- ----------------------------------------------------------------------- --- --- Copyright (c) 2006, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. 
--- ----------------------------------------------------------------------- --- History: --- $Log: mlp_table.lua,v $ --- Revision 1.5 2006/11/10 02:11:17 fab13n --- compiler faithfulness to 5.1 improved --- gg.expr extended --- mlp.expr refactored --- --- Revision 1.4 2006/11/09 09:39:57 fab13n --- some cleanup --- --- Revision 1.3 2006/11/07 04:38:00 fab13n --- first bootstrapping version. --- --- Revision 1.2 2006/11/05 15:08:34 fab13n --- updated code generation, to be compliant with 5.1 --- ----------------------------------------------------------------------- - --------------------------------------------------------------------------------- --- --- Exported API: --- * [mlp.table_field()] --- * [mlp.table_content()] --- * [mlp.table()] --- --- KNOWN BUG: doesn't handle final ";" or "," before final "}" --- --------------------------------------------------------------------------------- - ---require "gg" ---require "mll" ---require "mlp_misc" - -module ("mlp", package.seeall) - --------------------------------------------------------------------------------- --- eta expansion to break circular dependencies: --------------------------------------------------------------------------------- -local function _expr (lx) return expr(lx) end - --------------------------------------------------------------------------------- --- [[key] = value] table field definition --------------------------------------------------------------------------------- -local bracket_field = gg.sequence{ "[", _expr, "]", "=", _expr, builder = "Pair" } - --------------------------------------------------------------------------------- --- [id = value] or [value] table field definition; --- [[key]=val] are delegated to [bracket_field()] --------------------------------------------------------------------------------- -function table_field (lx) - if lx:is_keyword (lx:peek(), "[") then return bracket_field (lx) end - local e = _expr (lx) - if lx:is_keyword (lx:peek(), "=") then - lx:next(); -- 
skip the "=" - local key = id2string(e) - local val = _expr(lx) - local r = { tag="Pair", key, val } - r.lineinfo = { first = key.lineinfo.first, last = val.lineinfo.last } - return r - else return e end -end - -local function _table_field(lx) return table_field(lx) end - --------------------------------------------------------------------------------- --- table constructor, without enclosing braces; returns a full table object --------------------------------------------------------------------------------- -table_content = gg.list { _table_field, - separators = { ",", ";" }, terminators = "}", builder = "Table" } - -local function _table_content(lx) return table_content(lx) end - --------------------------------------------------------------------------------- --- complete table constructor including [{...}] --------------------------------------------------------------------------------- -table = gg.sequence{ "{", _table_content, "}", builder = fget(1) } - - diff --git a/src/lib/errnode.lua b/src/lib/errnode.lua deleted file mode 100644 index 3a3ab34..0000000 --- a/src/lib/errnode.lua +++ /dev/null @@ -1,19 +0,0 @@ -require 'metalua.compiler' --- --- Ecapsulates funcion mlc.luastring_to_ast in order to protect call and parse --- error string when an error occurs. --- --- @param src string containg Lua code to evaluate --- @return AST of table type, as returned by mlc.luastring_to_ast. 
Contains an --- error when AST generation fails --- -function getast(src) - local status, result = pcall(mlc.luastring_to_ast, src) - if status then return result else - local line, column, offset = result:match '%(l.(%d+), c.(%d+), k.(%d+)%)' - local filename = result :match '^([^:]+)' - local msg = result :match 'line %d+, char %d+: (.-)\n' - local li = {line, column, offset, filename} - return {tag='Error', lineinfo={first=li, last=li}, msg} - end -end diff --git a/src/lib/metalua/base.lua b/src/lib/metalua/base.lua deleted file mode 100644 index 710643c..0000000 --- a/src/lib/metalua/base.lua +++ /dev/null @@ -1,104 +0,0 @@ ----------------------------------------------------------------------- ----------------------------------------------------------------------- --- --- Base library extension --- ----------------------------------------------------------------------- ----------------------------------------------------------------------- - -if not metalua then rawset(getfenv(), 'metalua', { }) end -metalua.version = "v-0.5" - -if not rawpairs then - rawpairs, rawipairs, rawtype = pairs, ipairs, type -end - -function pairs(x) - assert(type(x)=='table', 'pairs() expects a table') - local mt = getmetatable(x) - if mt then - local mtp = mt.__pairs - if mtp then return mtp(x) end - end - return rawpairs(x) -end - -function ipairs(x) - assert(type(x)=='table', 'ipairs() expects a table') - local mt = getmetatable(x) - if mt then - local mti = mt.__ipairs - if mti then return mti(x) end - end - return rawipairs(x) -end - ---[[ -function type(x) - local mt = getmetatable(x) - if mt then - local mtt = mt.__type - if mtt then return mtt end - end - return rawtype(x) -end -]] - -function min (a, ...) - for n in values{...} do if na then a=n end end - return a -end - -function o (...) - local args = {...} - local function g (...) 
- local result = {...} - for i=#args, 1, -1 do result = {args[i](unpack(result))} end - return unpack (result) - end - return g -end - -function id (...) return ... end -function const (k) return function () return k end end - -function printf(...) return print(string.format(...)) end -function eprintf(...) - io.stderr:write(string.format(...).."\n") -end - -function ivalues (x) - assert(type(x)=='table', 'ivalues() expects a table') - local i = 1 - local function iterator () - local r = x[i]; i=i+1; return r - end - return iterator -end - - -function values (x) - assert(type(x)=='table', 'values() expects a table') - local function iterator (state) - local it - state.content, it = next(state.list, state.content) - return it - end - return iterator, { list = x } -end - -function keys (x) - assert(type(x)=='table', 'keys() expects a table') - local function iterator (state) - local it = next(state.list, state.content) - state.content = it - return it - end - return iterator, { list = x } -end - diff --git a/src/lib/metalua/clopts.mlua b/src/lib/metalua/clopts.mlua deleted file mode 100644 index 7c9af30..0000000 --- a/src/lib/metalua/clopts.mlua +++ /dev/null @@ -1,204 +0,0 @@ --------------------------------------------------------------------------------- --- Command Line OPTionS handler --- ============================ --- --- This lib generates parsers for command-line options. It encourages --- the following of some common idioms: I'm pissed off by Unix tools --- which sometimes will let you concatenate single letters options, --- sometimes won't, will prefix long name options with simple dashes --- instead of doubles, etc. --- --------------------------------------------------------------------------------- - --- TODO: --- * add a generic way to unparse options ('grab everything') --- * doc --- * when a short options that takes a param isn't the last element of a series --- of shorts, take the remaining of the sequence as that param, e.g. 
-Ifoo --- * let unset strings/numbers with + --- * add a ++ long counterpart to + --- - --{ extension 'match' } - -function clopts(cfg) - local short, long, param_func = { }, { } - local legal_types = table.transpose{ - 'boolean','string','number','string*','number*','nil', '*' } - - ----------------------------------------------------------------------------- - -- Fill short and long name indexes, and check its validity - ----------------------------------------------------------------------------- - for x in ivalues(cfg) do - local xtype = type(x) - if xtype=='table' then - if not x.type then x.type='nil' end - if not legal_types[x.type] then error ("Invalid type name "..x.type) end - if x.short then - if short[x.short] then error ("multiple definitions for option "..x.short) - else short[x.short] = x end - end - if x.long then - if long[x.long] then error ("multiple definitions for option "..x.long) - else long[x.long] = x end - end - elseif xtype=='function' then - if param_func then error "multiple parameters handler in clopts" - else param_func=x end - end - end - - ----------------------------------------------------------------------------- - -- Print a help message, summarizing how to use the command line - ----------------------------------------------------------------------------- - local function print_usage(msg) - if msg then print(msg,'\n') end - print(cfg.usage or "Options:\n") - for x in values(cfg) do - if type(x) == 'table' then - local opts = { } - if x.type=='boolean' then - if x.short then opts = { '-'..x.short..'/+'..x.short } end - if x.long then table.insert (opts, '--'..x.long..'/++'..x.long) end - else - if x.short then opts = { '-'..x.short..' <'..x.type..'>' } end - if x.long then table.insert (opts, '--'..x.long..' 
<'..x.type..'>' ) end - end - printf(" %s: %s", table.concat(opts,', '), x.usage or '') - end - end - print'' - end - - -- Unless overridden, -h and --help display the help msg - local default_help = { action = | | print_usage() or os.exit(0); - long='help';short='h';type='nil'} - if not short.h then short.h = default_help end - if not long.help then long.help = default_help end - - ----------------------------------------------------------------------------- - -- Helper function for options parsing. Execute the attached action and/or - -- register the config in cfg. - -- - -- * cfg is the table which registers the options - -- * dict the name->config entry hash table that describes options - -- * flag is the prefix '-', '--' or '+' - -- * opt is the option name - -- * i the current index in the arguments list - -- * args is the arguments list - ----------------------------------------------------------------------------- - local function actionate(cfg, dict, flag, opt, i, args) - local entry = dict[opt] - if not entry then print_usage ("invalid option "..flag..opt); return false; end - local etype, name = entry.type, entry.name or entry.long or entry.short - match etype with - | 'string' | 'number' | 'string*' | 'number*' -> - if flag=='+' or flag=='++' then - print_usage ("flag "..flag.." is reserved for boolean options, not for "..opt) - return false - end - local arg = args[i+1] - if not arg then - print_usage ("missing parameter for option "..flag..opt) - return false - end - if etype:strmatch '^number' then - arg = tonumber(arg) - if not arg then - print_usage ("option "..flag..opt.." 
expects a number argument") - end - end - if etype:strmatch '%*$' then - if not cfg[name] then cfg[name]={ } end - table.insert(cfg[name], arg) - else cfg[name] = arg end - if entry.action then entry.action(arg) end - return i+2 - | 'boolean' -> - local arg = flag=='-' or flag=='--' - cfg[name] = arg - if entry.action then entry.action(arg) end - return i+1 - | 'nil' -> - cfg[name] = true; - if entry.action then entry.action() end - return i+1 - | '*' -> - local arg = table.isub(args, i+1, #args) - cfg[name] = arg - if entry.action then entry.action(arg) end - return #args+1 - | _ -> assert( false, 'undetected bad type for clopts action') - end - end - - ----------------------------------------------------------------------------- - -- Parse a list of commands: the resulting function - ----------------------------------------------------------------------------- - local function parse(...) - local cfg = { } - if not ... then return cfg end - local args = type(...)=='table' and ... or {...} - local i, i_max = 1, #args - while i <= i_max do - local arg, flag, opt, opts = args[i] - --printf('beginning of loop: i=%i/%i, arg=%q', i, i_max, arg) - if arg=='-' then - i=actionate (cfg, short, '-', '', i, args) - -{ `Goto 'continue' } - end - - ----------------------------------------------------------------------- - -- double dash option - ----------------------------------------------------------------------- - flag, opt = arg:strmatch "^(%-%-)(.*)" - if opt then - i=actionate (cfg, long, flag, opt, i, args) - -{ `Goto 'continue' } - end - - ----------------------------------------------------------------------- - -- double plus option - ----------------------------------------------------------------------- - flag, opt = arg:strmatch "^(%+%+)(.*)" - if opt then - i=actionate (cfg, long, flag, opt, i, args) - -{ `Goto 'continue' } - end - - ----------------------------------------------------------------------- - -- single plus or single dash series of short options - 
----------------------------------------------------------------------- - flag, opts = arg:strmatch "^([+-])(.+)" - if opts then - local j_max, i2 = opts:len() - for j = 1, j_max do - opt = opts:sub(j,j) - --printf ('parsing short opt %q', opt) - i2 = actionate (cfg, short, flag, opt, i, args) - if i2 ~= i+1 and j < j_max then - error ('short option '..opt..' needs a param of type '..short[opt]) - end - end - i=i2 - -{ `Goto 'continue' } - end - - ----------------------------------------------------------------------- - -- handler for non-option parameter - ----------------------------------------------------------------------- - if param_func then param_func(args[i]) end - if cfg.params then table.insert(cfg.params, args[i]) - else cfg.params = { args[i] } end - i=i+1 - - -{ `Label 'continue' } - if not i then return false end - end -- - return cfg - end - - return parse -end - - diff --git a/src/lib/metalua/compiler.lua b/src/lib/metalua/compiler.lua deleted file mode 100644 index ac94fc0..0000000 --- a/src/lib/metalua/compiler.lua +++ /dev/null @@ -1,3 +0,0 @@ -require 'metalua.runtime' -require 'metalua.mlc' -require 'metalua.package2' diff --git a/src/lib/metalua/dollar.mlua b/src/lib/metalua/dollar.mlua deleted file mode 100644 index d5cf99d..0000000 --- a/src/lib/metalua/dollar.mlua +++ /dev/null @@ -1,24 +0,0 @@ --- TODO: support modules as macros? --- does it make sense to store a constant AST as a macro? - --{ extension 'match' } - -dollar = rawget(getfenv(), 'dollar') or { } - -local function dollar_builder(call) - match call with - | `Call{ `Id{name}, ... 
} -> return dollar[name](select(2, unpack(call))) - | `Id{name} -> - local m = dollar[name] - match type(m) with - | 'function' -> return m() - | 'table' -> return m - | 'nil' -> error "No such macro registered" - | t -> error ("Invalid macro type "..t) - end - | _ -> error "Invalid $macro, '$' should be followed by an identifier or function call" - end -end - -mlp.expr.prefix:add{ '$', prec = 100, builder = |_, x| dollar_builder(x) } -mlp.stat:add{ '$', mlp.expr, builder = |x| dollar_builder(x[1]) } diff --git a/src/lib/metalua/extension/H-runtime.mlua b/src/lib/metalua/extension/H-runtime.mlua deleted file mode 100644 index d2f4a43..0000000 --- a/src/lib/metalua/extension/H-runtime.mlua +++ /dev/null @@ -1,216 +0,0 @@ -require 'metalua.walk.id' --{ extension 'log' } - --------------------------------------------------------------------------------- --- --- H params: --- * H.alpha is the `Local{ } (or `Set{ }) statement which will --- receive the alpha-conversions required to restore the free --- variables of the transformed term. For instance, --- H+{print(1)} will be transformed into +{.1.X.print(1)}, --- and alpha will contain +{local -{`Id '.1.X.print} = print }. --- alpha is reused and augmented by successive calls to H(). --- --- * H.side contains 'inside', 'outside', 'both' or nil (equivalent to --- 'both'). It indicates the kind of hygienization that's to be --- performed. --- --- * H.keep contain a set of free variable names which must not be --- renamed. --- --- * H.kind is the kind of walker that must be used ('expr', 'stat', --- 'block'...) and defaults to 'guess'. --- --- * H:set (field, val) sets a field in H and returns H, so that calls --- can be chained, e.g.: --- > H:set(keep, {'print'}):set('side', outside)+{print(x)} --- --- * H:reset(field) sets a field to nil, and returns the value of that --- field prior to nilification. 
--------------------------------------------------------------------------------- - -H = { } --setmetatable(H, H) -H.__index=H -H.template = { alpha = { } } - --------------------------------------------------------------------------------- --- --------------------------------------------------------------------------------- -function H:new(x) - local instance = table.deep_copy(self.template) - if x then instance <- x end - setmetatable(instance, self) - return instance -end - --------------------------------------------------------------------------------- --- --------------------------------------------------------------------------------- -function H:__call (ast) - assert (type(ast)=='table', "H expects an AST") - - local local_renames -- only set if inside hygienization's required - - ----------------------------------------------------------------------------- - -- kind of hygienization(s) to perform: h_inseide and/or h_outside - ----------------------------------------------------------------------------- - local h_inside, h_outside do - local side = self.side or 'both' - h_inside = side=='inside' or side=='both' - h_outside = side=='outside' or side=='both' - end - - ----------------------------------------------------------------------------- - -- Initialize self.keep: - -- self.keep is a dictionary of free var names to be protected from capture - ----------------------------------------------------------------------------- - do - local k = self.keep - -- If there's no self.keep, that's an empty dictionary - if not k then k = { }; self.keep = k - -- If it's a string, consider it as a single-element dictionary - elseif type(k)=='string' then k = { [k] = true }; self.keep=k - -- If there's a list-part in self.keep, transpose it: - else for i, v in ipairs(k) do k[v], k[i] = true, nil end end - end - - ----------------------------------------------------------------------------- - -- Config skeleton for the id walker - 
----------------------------------------------------------------------------- - local cfg = { expr = { }, stat = { }, id = { } } - - ----------------------------------------------------------------------------- - -- Outside hygienization: all free variables are renamed to fresh ones, - -- and self.alpha is updated to contain the assignments required to keep - -- the AST's semantics. - ----------------------------------------------------------------------------- - if h_outside then - local alpha = self.alpha - - -- free_vars is an old_name -> new_name dictionary computed from alpha: - -- self.alpha is not an efficient representation for searching. - if not alpha then alpha = { }; self.alpha = alpha end - -- FIXME: alpha should only be overridden when there actually are some - -- globals renamed. - if #alpha==0 then alpha <- `Local{ { }, { } } end - local new, old = unpack(alpha) - local free_vars = { } - - assert (#new==#old, "Invalid alpha list") - for i = 1, #new do - assert (old[i].tag=='Id' and #old[i]==1, "Invalid lhs in alpha list") - assert (new[i].tag=='Id' and #new[i]==1, "Invalid rhs in alpha list") - free_vars[old[i][1]] = new[i][1] - end - - -- Rename free variables that are not supposed to be captured. - function cfg.id.free (id) - local old_name = id[1] - if self.keep[old_name] then return end - local new_name = free_vars[old_name] - if not new_name then - new_name = mlp.gensym('X.'..old_name)[1] - free_vars[old_name] = new_name - table.insert(alpha[1], `Id{new_name}) - table.insert(alpha[2], `Id{old_name}) - end - id[1] = new_name - end - end - - ----------------------------------------------------------------------------- - -- Inside hygienization: rename all local variables and their ocurrences. 
- ----------------------------------------------------------------------------- - if h_inside then - - ---------------------------------------------------------------- - -- Renamings can't performed on-the-spot, as it would - -- transiently break the link between binders and bound vars, - -- thus preventing the algo to work. They're therefore stored - -- in local_renames, and performed after the whole tree has been - -- walked. - ---------------------------------------------------------------- - - local_renames = { } -- `Id{ old_name } -> new_name - local bound_vars = { } -- binding statement -> old_name -> new_name - - ---------------------------------------------------------------- - -- Give a new name to newly created local vars, store it in - -- bound_vars - ---------------------------------------------------------------- - function cfg.binder (id, binder) - if id.h_boundary then return end - local old_name = id[1] - local binder_table = bound_vars[binder] - if not binder_table then - binder_table = { } - bound_vars[binder] = binder_table - end - local new_name = mlp.gensym('L.'..old_name)[1] - binder_table[old_name] = new_name - local_renames[id] = new_name - end - - ---------------------------------------------------------------- - -- List a bound var for renaming. The new name has already been - -- chosen and put in bound_vars by cfg.binder(). - ---------------------------------------------------------------- - function cfg.id.bound (id, binder) - if id.h_boundary then return end - local old_name = id[1] - local new_name = bound_vars[binder][old_name] - --.log(bound_vars[binder]) - assert(new_name, "no alpha conversion for a bound var?!") - local_renames[id] = new_name - end - end - - ----------------------------------------------------------------------------- - -- Don't traverse subtrees marked by '!' 
- ----------------------------------------------------------------------------- - local cut_boundaries = |x| x.h_boundary and 'break' or nil - cfg.stat.down, cfg.expr.down = cut_boundaries, cut_boundaries - - ----------------------------------------------------------------------------- - -- The walker's config is ready, let's go. - -- After that, ids are renamed in ast, free_vars and bound_vars are set. - ----------------------------------------------------------------------------- - walk_id [self.kind or 'guess'] (cfg, ast) - - if h_inside then -- Apply local name changes - for id, new_name in pairs(local_renames) do id[1] = new_name end - end - - return ast -end - --------------------------------------------------------------------------------- --- Return H to allow call chainings --------------------------------------------------------------------------------- -function H:set(field, val) - local t = type(field) - if t=='string' then self[field]=val - elseif t=='table' then self <- field - else error("Can't set H, field arg can't be of type "..t) end - return self -end - --------------------------------------------------------------------------------- --- Return the value before reset --------------------------------------------------------------------------------- -function H:reset(field) - if type(field) ~= 'string' then error "Can only reset H string fields" end - local r = H[field] - H[field] = nil - return r -end - --- local function commit_locals_to_chunk(x) --- local alpha = H:reset 'alpha' --- --$log ('commit locals', x, alpha, 'nohash') --- if not alpha or not alpha[1][1] then return end --- if not x then return alpha end --- table.insert(x, 1, alpha) --- end - --- mlp.chunk.transformers:add (commit_locals_to_chunk) diff --git a/src/lib/metalua/extension/H.mlua b/src/lib/metalua/extension/H.mlua deleted file mode 100644 index 4f0ac41..0000000 --- a/src/lib/metalua/extension/H.mlua +++ /dev/null @@ -1,22 +0,0 @@ -require 'metalua.walk.id' --{ extension 
'log' } - -mlp.expr.prefix:add{ '!', prec = 5, - builder = function(_,x) - local v = mlp.gensym() - return `Stat{ +{ block: - local -{v} = -{x}; - (-{v}).h_boundary=true }, - v } - end } - -mlp.stat:add{ '!', mlp.expr, builder = |x| +{stat: (-{x[1]}).h_boundary=true } } - --- * if there's no boundary in it, is there a need to rename vars? --- ==> first pass to mark binders which contain boundaries, --- then 2nd pass only touched those which have a splice --- in them. - -return +{ require (-{ `String{ package.metalua_extension_prefix .. 'H-runtime' } }) } - - diff --git a/src/lib/metalua/extension/anaphoric.mlua b/src/lib/metalua/extension/anaphoric.mlua deleted file mode 100644 index 2439fed..0000000 --- a/src/lib/metalua/extension/anaphoric.mlua +++ /dev/null @@ -1,54 +0,0 @@ --------------------------------------------------------------------------------- --- --- Anaphoric macros. --- --- This extension turns 'it' into a special variable, that's bound to --- an often used value: --- --- * in an 'if' statement, 'it' is bound, in a block, to the condition --- that triggered the block's execution: --- > if 1234 then y=it end; assert (y == 1234) --- --- * in a while loop, it's bound to the test: --- > while file:read "*a" do table.insert (lines, it) end --- --- 'it' is bound the the most closely surrounding structure. If you wanted to --- use its content at a deeper position in the AST, you would have to save it --- in a temporary variable. But what you should really do in such a case is --- avoiding to use anaphoric macros: they're fine for one-liner, but they --- reduce readability for bigger functions. --------------------------------------------------------------------------------- - --- TODO: 'and' operator could, and maybe should, be anaphoric as well --- TODO: anaphoric functions would be cool for recursive functions, but --- recursive calls are always in an 'if' statement, so the pronoun --- used for functions must not be the same as for 'if'. 
- -require 'freevars' - -local function anaphoric_if(ast) - local it_found = false - for i=2, #ast do - if freevars.block(ast[i])['it'] then - it_found = true - break - end - end - if it_found then - local cond = ast[1] - ast[1] = +{it} - return +{stat: do local it = -{cond}; -{ast} end } - end -end - -local function anaphoric_while(ast) - local it_found = false - if freevars.block(ast[2])['it'] then - local cond = ast[1] - ast[1] = +{it} - return +{stat: do local it = -{cond}; -{ast} end } - end -end - -mlp.stat:get'if'.transformers:add(anaphoric_if) -mlp.stat:get'while'.transformers:add(anaphoric_while) \ No newline at end of file diff --git a/src/lib/metalua/extension/clist.mlua b/src/lib/metalua/extension/clist.mlua deleted file mode 100644 index b7e1992..0000000 --- a/src/lib/metalua/extension/clist.mlua +++ /dev/null @@ -1,149 +0,0 @@ ----------------------------------------------------------------------- --- Metalua samples: $Id$ --- --- Summary: Lists by comprehension --- ----------------------------------------------------------------------- --- --- Copyright (c) 2006-2007, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. --- --------------------------------------------------------------------------------- --- --- This extension implements list comprehensions, similar to Haskell and --- Python syntax, to easily describe lists. 
--- --------------------------------------------------------------------------------- - --{ extension "match" } - -local function dots_builder (x) return `Dots{ x } end - -local function for_builder (x, h) - match x with - | `Comp{ _, acc } -> table.insert (acc, h[1]); return x - | `Pair{ _, _ } -> error "No explicit key in a for list generator" - | _ -> return `Comp{ x, {h[1]} } - end -end - -local function if_builder (x, p) - match x with - | `Comp{ _, acc } -> table.insert (acc, `If{ p[1] }); return x - | `Pair{ _, _ } -> error "No explicit key in a list guard" - | _ -> return `Comp{ x, p[1] } - end -end - -local function comp_builder(core, list, no_unpack) - -- [ti] = temp var holding table.insert - -- [v] = variable holding the table being built - -- [r] = the core of the list being built - local ti, v, r = mlp.gensym "table_insert", mlp.gensym "table" - - ----------------------------------------------------------------------------- - -- 1 - Build the loop's core: if it has suffix "...", every elements of the - -- multi-return must be inserted, hence the extra [for] loop. - ----------------------------------------------------------------------------- - match core with - | `Dots{ x } -> - local w = mlp.gensym() - r = +{stat: for -{w} in values( -{x} ) do -{ `Call{ ti, v, w } } end } - | `Pair{ k, w } -> - r = `Set{ { `Index{ v, k } }, { w } } - | _ -> r = `Call{ ti, v, core } - end - - ----------------------------------------------------------------------------- - -- 2 - Stack the if and for control structures, from outside to inside. - -- This is done in a destructive way for the elements of [list]. 
- ----------------------------------------------------------------------------- - for i = #list, 1, -1 do - table.insert (list[i], {r}) - r = list[i] - end - if no_unpack then - return `Stat{ { `Local{ {ti, v}, { +{table.insert}, `Table} }, r }, v } - else - return +{ function() - local -{ti}, -{v} = table.insert, { } - -{r}; return unpack(-{v}) - end () } - end -end - -local function table_content_builder (list) - match list with - | { `Comp{ y, acc } } -> return comp_builder( y, acc, "no unpack") - | _ -> - local tables = { `Table } - local ctable = tables[1] - local function flush() ctable=`Table; table.insert(tables, ctable) end - for x in values(list) do - match x with - | `Comp{ y, acc } -> table.insert(ctable, comp_builder(y, acc)); flush() - | `Dots{ y } -> table.insert(ctable, y); flush() - | _ -> table.insert (ctable, x); - end - end - match tables with - | { x } | { x, { } } -> return x - | _ -> - if #tables[#tables]==0 then table.remove(tables) end --suppress empty table - return `Call{ +{table.cat}, unpack(tables) } - end - end -end - -mlp.table_field = gg.expr{ name="table cell", - primary = mlp.table_field, - suffix = { name="table cell suffix", - { "...", builder = dots_builder }, - { "for", mlp.for_header, builder = for_builder }, - { "if", mlp.expr, builder = if_builder } } } - -mlp.table_content.builder = table_content_builder - ---[[ -mlp.stat:add{ "for", gg.expr { - primary = for_header, - suffix = { - { "for", mlp.for_header, builder = for_builder }, - { "if", mlp.expr, builder = if_builder } } }, - "do", mlp.block, "end", builder = for_stat_builder } ---]] - --------------------------------------------------------------------------------- --- Back-end for improved index operator. 
--------------------------------------------------------------------------------- -local function index_builder(a, suffix) - match suffix[1] with - -- Single index, no range: keep the native semantics - | { { e, false } } -> return `Index{ a, e } - -- Either a range, or multiple indexes, or both - | ranges -> - local r = `Call{ +{table.isub}, a } - local function acc (x,y) table.insert (r,x); table.insert (r,y) end - for seq in ivalues (ranges) do - match seq with - | { e, false } -> acc(e,e) - | { e, f } -> acc(e,f) - end - end - return r - end -end - --------------------------------------------------------------------------------- --- Improved "[...]" index operator: --- * support for multi-indexes ("foo[bar, gnat]") --- * support for ranges ("foo[bar ... gnat]") --------------------------------------------------------------------------------- -mlp.expr.suffix:del '[' -mlp.expr.suffix:add{ name="table index/range", - "[", gg.list{ - gg.sequence { mlp.expr, gg.onkeyword{ "...", mlp.expr } } , - separators = { ",", ";" } }, - "]", builder = index_builder } diff --git a/src/lib/metalua/extension/continue.mlua b/src/lib/metalua/extension/continue.mlua deleted file mode 100644 index 2bb2ad9..0000000 --- a/src/lib/metalua/extension/continue.mlua +++ /dev/null @@ -1,53 +0,0 @@ -require "metalua.walk" - ----------------------------------------------------------------------- --- * [loop_tags] are the tags of statements which support continue. --- * [loop_keywords] are the initial keywords which trigger the parsing --- of these statements: they're indeed indexed by keyword in [mlp.stat]. ----------------------------------------------------------------------- - -local loop_tags = table.transpose{ "Forin", "Fornum", "While", "Repeat" } -local loop_keywords = { "for", "while", "repeat" } - ----------------------------------------------------------------------- --- This function takes the AST of a continue-enabled loop, parse --- its body to find all instances of [`Continue]. 
If any of them --- is found ([label~=nil]), they're transformed in [`Goto{...}], and --- the corresponding label is added at the end of the loop's body. --- --- Caveat: if a [continue] appears in the non-body part of a loop --- (and therefore is relative to some enclosing loop), it isn't --- handled, and therefore causes a compilation error. This could --- only happen due in a [`Stat{ }], however, since [`Function{ }] --- cuts the search for [`Continue]. ----------------------------------------------------------------------- -local function loop_transformer (ast) - local label - local cfg = { stat = { }; expr = { } } - - function cfg.stat.down (x) - if loop_tags[x.tag] then return 'break' - elseif x.tag=='Continue' then - if not label then label = mlp.gensym 'continue' end - x <- `Goto{ label } - end - end - - function cfg.expr.down (x) - return x.tag=='Function' and 'break' - end - - local loop_body = ast.tag=="Repeat" and ast[1] or ast[#ast] - walk.block (cfg, loop_body) - if label then table.insert (loop_body, `Label{ label }) end -end - ----------------------------------------------------------------------- --- Register the transformer for each kind of loop: ----------------------------------------------------------------------- -for keyword in values (loop_keywords) do - mlp.stat:get(keyword).transformers:add (loop_transformer) -end - -mlp.lexer:add "continue" -mlp.stat:add{ "continue", builder = ||`Continue } diff --git a/src/lib/metalua/extension/localin.mlua b/src/lib/metalua/extension/localin.mlua deleted file mode 100644 index fb336e6..0000000 --- a/src/lib/metalua/extension/localin.mlua +++ /dev/null @@ -1,2 +0,0 @@ -mlp.expr:add{ "local", mlp.id, "=", mlp.expr, "in", mlp.expr, - builder=|x| `Stat{ { `Local{ { x[1] }, { x[2] } } }, x[3] } } \ No newline at end of file diff --git a/src/lib/metalua/extension/log.mlua b/src/lib/metalua/extension/log.mlua deleted file mode 100644 index 80cb5b2..0000000 --- a/src/lib/metalua/extension/log.mlua +++ /dev/null 
@@ -1,39 +0,0 @@ -require 'metalua.dollar' - --{ extension 'match' } - -function dollar.log(...) - local args = {...} - local ti = table.insert - local code = { } - local nohash = false - local width = 80 - - local i=1 - if args[i].tag=='String' then - ti(code, +{print(" [LOG] "..-{args[1]})}) - i += 1 - end - - local xtra_args, names, vals = { }, { }, { } - for i=i, #args do - match args[i] with - | +{ 'nohash' } -> nohash = true - | `Number{ w } -> width = w - --| `String{...} | `Number{...} -> ti (xtra_args, args[i]) - | `Id{n} -> ti (names, n); ti (vals, args[i]) - | x -> ti (names, table.tostring(x, 'nohash')); ti (vals, x) - end - end - - for i=1, #names do - local msg = string.format(" [LOG] %s = ", names[i]) - local printer = `Call{ +{table.tostring}, - vals[i], - `Number{ width }, - `Number{ #msg } } - if nohash then ti(printer, +{'nohash'}) end - ti (code, `Call{ +{printf}, +{"%s%s"}, `String{ msg }, printer }) - end - return code -end diff --git a/src/lib/metalua/extension/ternary.mlua b/src/lib/metalua/extension/ternary.mlua deleted file mode 100644 index 9b0043f..0000000 --- a/src/lib/metalua/extension/ternary.mlua +++ /dev/null @@ -1,10 +0,0 @@ -local function b(x, suffix) - local v, ontrue, onfalse = mlp.gensym "test", unpack (suffix) - return `Stat{ - +{ block: - local -{v} - if -{x} then (-{v}) = -{ontrue} else (-{v}) = -{onfalse or `Nil} end }, - v } -end - -mlp.expr.suffix:add{ "?", mlp.expr, gg.onkeyword{ ",", mlp.expr }, prec=5, builder=b } diff --git a/src/lib/metalua/extension/trycatch.mlua b/src/lib/metalua/extension/trycatch.mlua deleted file mode 100644 index 5466a07..0000000 --- a/src/lib/metalua/extension/trycatch.mlua +++ /dev/null @@ -1,189 +0,0 @@ --{ extension 'match' } - --------------------------------------------------------------------------------- --- --- TODO: --- --- * Hygienize calls to pcall() --- --------------------------------------------------------------------------------- - --{ extension 'H' } --{ extension 'log' } - 
--- Get match parsers and builder, for catch cases handling: -local match_alpha = require 'metalua.extension.match' -local H = H:new{side='inside', alpha = match_alpha } - --- We'll need to track rogue return statements: -require 'metalua.walk' - --- Put a block AST into a pcall(): -local mkpcall = |block| +{pcall(function() -{block} end)} - --- The statement builder: -function trycatch_builder(x) - --$log ("trycatch_builder", x, 'nohash', 60) - local try_code, catch_cases, finally_code = unpack(x) - local insert_return_catcher = false - - -- Can't be hygienize automatically by the current version of H, as - -- it must bridge from inside user code (hacjed return statements) - -- to outside macro code. - local caught_return = !mlp.gensym 'caught_return' - local saved_args - - !try_code; !(finally_code or { }) - -- FIXME: Am I sure there's no need to hygienize inside? - --[[if catch_cases then - for case in ivalues(catch_cases) do - --$log(case,'nohash') - local patterns, guard, block = unpack(case) - ! block - end - end]] - - - ---------------------------------------------------------------- - -- Returns in the try-block must be transformed: - -- from the user's PoV, the code in the try-block isn't - -- a function, therefore a return in it must not merely - -- end the execution of the try block, but: - -- * not cause any error to be caught; - -- * let the finally-block be executed; - -- * only then, let the enclosing function return with the - -- appropraite values. - -- The way to handle that is that any returned value is stored - -- into the runtime variable caught_return, then a return with - -- no value is sent, to stop the execution of the try-code. - -- - -- Similarly, a return in a catch case code must not prevent - -- the finally-code from being run. - -- - -- This walker catches return statements and perform the relevant - -- transformation into caught_return setting + empty return. 
- -- - -- There is an insert_return_catcher compile-time flag, which - -- allows to avoid inserting return-handling code in the result - -- when not needed. - ---------------------------------------------------------------- - local replace_returns_and_dots do - local function f(x) - match x with - | `Return{...} -> - insert_return_catcher = true - -- Setvar's 'caught_return' code can't be hygienize by H currently. - local setvar = `Set{ {caught_return}, { `Table{ unpack(x) } } } - x <- { setvar; `Return }; x.tag = nil; - --$log('transformed return stat:', x, 60) - return 'break' - | `Function{...} -> return 'break' - -- inside this, returns would be the nested function's, not ours. - | `Dots -> - if not saved_args then saved_args = mlp.gensym 'args' end - x <- `Call{ `Id 'unpack', saved_args } - | _ -> -- pass - end - end - local cfg = { stat = {down=f}, expr = {down=f} } - replace_returns_and_dots = |x| walk.block(cfg, x) - end - - -- parse returns in the try-block: - replace_returns_and_dots (try_code) - - -- code handling the error catching process: - local catch_result do - if catch_cases and #catch_cases>0 then - ---------------------------------------------------------- - -- Protect catch code against failures: they run in a pcall(), and - -- the result is kept in catch_* vars so that it can be used to - -- relaunch the error after the finally code has been executed. 
- ---------------------------------------------------------- - for x in ivalues (catch_cases) do - local case_code = x[3] - -- handle rogue returns: - replace_returns_and_dots (case_code) - -- in case of error in the catch, we still need to run "finally": - x[3] = +{block: catch_success, catch_error = -{mkpcall(case_code)}} - end - ---------------------------------------------------------- - -- Uncaught exceptions must not cause a mismatch, - -- so we introduce a catch-all do-nothing last case: - ---------------------------------------------------------- - table.insert (catch_cases, { { { `Id '_' } }, false, { } }) - catch_result = spmatch.match_builder{ {+{user_error}}, catch_cases } - else - catch_result = { } - end - end - - ---------------------------------------------------------------- - -- Build the bits of code that will handle return statements - -- in the user code (try-block and catch-blocks). - ---------------------------------------------------------------- - local caught_return_init, caught_return_rethrow do - if insert_return_catcher then - caught_return_init = `Local{{caught_return}} - caught_return_rethrow = - +{stat: if -{caught_return} then return unpack(-{caught_return}) end} - else - caught_return_init, caught_return_rethrow = { }, { } - end - end - - local saved_args_init = - saved_args and `Local{ {saved_args}, { `Table{`Dots} } } or { } - - -- The finally code, to execute no matter what: - local finally_result = finally_code or { } - - -- And the whole statement, gluing all taht together: - local result = +{stat: - do - -{ saved_args_init } - -{ caught_return_init } - local user_success, user_error = -{mkpcall(try_code)} - local catch_success, catch_error = false, user_error - if not user_success then -{catch_result} end - -{finally_result} - if not user_success and not catch_success then error(catch_error) end - -{ caught_return_rethrow } - end } - - H(result) - - return result -end - -function catch_case_builder(x) - --$log 
("catch_case_builder", x, 'nohash', 60) - local patterns, guard, _, code = unpack(x) - -- patterns ought to be a pattern_group, but each expression must - -- be converted into a single-element pattern_seq. - for i = 1, #patterns do patterns[i] = {patterns[i]} end - return { patterns, guard, code } -end - -mlp.lexer:add{ 'try', 'catch', 'finally', '->' } -mlp.block.terminators:add{ 'catch', 'finally' } - -mlp.stat:add{ - 'try', - mlp.block, - gg.onkeyword{ 'catch', - gg.list{ - gg.sequence{ - mlp.expr_list, - gg.onkeyword{ 'if', mlp.expr }, - gg.optkeyword 'then', - mlp.block, - builder = catch_case_builder }, - separators = 'catch' } }, - gg.onkeyword{ 'finally', mlp.block }, - 'end', - builder = trycatch_builder } - -return H.alpha - - diff --git a/src/lib/metalua/extension/types-runtime.mlua b/src/lib/metalua/extension/types-runtime.mlua deleted file mode 100644 index 5769f8a..0000000 --- a/src/lib/metalua/extension/types-runtime.mlua +++ /dev/null @@ -1,159 +0,0 @@ --------------------------------------------------------------------------------- --- Initialize the types table. It has an __index metatable entry, --- so that if a symbol is not found in it, it is looked for in the current --- environment. It allows to write things like [ n=3; x :: vector(n) ]. --------------------------------------------------------------------------------- -types = { } -setmetatable (types, { __index = getfenv(0)}) - -function types.error (fmt, ...) - error(string.format("Runtime type-checking failure: "..fmt, ...)) -end - --------------------------------------------------------------------------------- --- Add a prefix to an error message, if an error occurs. --- Useful for type checkers that call sub-type-checkers. --------------------------------------------------------------------------------- -local function nest_error (prefix, ...) - local status, msg = pcall(...) 
- if not status then types.error("%s:\n%s", prefix, msg) end -end - --------------------------------------------------------------------------------- --- Built-in types --------------------------------------------------------------------------------- -for typename in values{ "number", "string", "boolean", "function", "thread" } do - types[typename] = - function (val) - if type(val) ~= typename then types.error ("%s expected", typename) end - end -end - -function types.integer(val) - if type(val)~='number' or val%1~=0 then types.error 'integer expected' end -end - --------------------------------------------------------------------------------- --- table(foo) checks --- table(foo, bar) checks --- table(i) where i is an integer checks --- table(i, j) where i and j are integers checks --- Integers and key/value types can be combined --------------------------------------------------------------------------------- -function types.table (...) - - local key_type, val_type, range_from, range_to - -- arguments parsing - for x in values{...} do - if type(x) == "number" then - if range2 then types.error "Invalid type: too many numbers in table type" - elseif range1 then range2 = x - else range1 = x end - else - if type_key then types.error "Invalid type: too many types" - elseif type_val then type_key, type_val = type_val, x - else type_val = x end - end - end - if not range2 then range2=range1 end - if not type_key then type_key = types.integer end - return function (val) - if type(val) ~= "table" then types.error "table expected" end - local s = #val - if range2 and range2 > s then types.error "Not enough table elements" end - if range1 and range1 < s then types.error "Too many elements table elements" end - for k,v in pairs(val) do - nest_error ("in table key", type_key, k) - nest_error ("in table value", type_val, v) - end - end -end - --------------------------------------------------------------------------------- --- [list (subtype)] checks that the term is a table, 
and all of its --- integer-indexed elements are of type [subtype]. --------------------------------------------------------------------------------- -types.list = |...| types.table (types.integer, ...) - --------------------------------------------------------------------------------- --- Check that [x] is an integral number --------------------------------------------------------------------------------- -function types.int (x) - if type(x)~="number" or x%1~=0 then types.error "Integer number expected" end -end - --------------------------------------------------------------------------------- --- [range(a,b)] checks that number [val] is between [a] and [b]. [a] and [b] --- can be omitted. --------------------------------------------------------------------------------- -function types.range (a,b) - return function (val) - if type(val)~="number" or a and valb then - types.error ("Number between %s and %s expected", - a and tostring(a) or "-infty", - b and tostring(b) or "+infty") - end - end -end - --------------------------------------------------------------------------------- --- [inter (x, y)] checks that the term has both types [x] and [y]. --------------------------------------------------------------------------------- -function types.inter (...) - local args={...} - return function(val) - for t in values(args) do nest_error ("in inter type", t, args) end - end -end - --------------------------------------------------------------------------------- --- [inter (x, y)] checks that the term has type either [x] or [y]. --------------------------------------------------------------------------------- -function types.union (...) - local args={...} - return function(val) - for t in values(args) do if pcall(t, val) then return end end - types.error "None of the types in the union fits" - end -end - --------------------------------------------------------------------------------- --- [optional(t)] accepts values of types [t] or [nil]. 
--------------------------------------------------------------------------------- -function types.optional(t) - return function(val) - if val~=nil then nest_error("In optional type", t, val) end - end -end - --------------------------------------------------------------------------------- --- A call to this is done on litteral tables passed as types, i.e. --- type {1,2,3} is transformed into types.__table{1,2,3}. --------------------------------------------------------------------------------- -function types.__table(s_type) - return function (s_val) - if type(s_val) ~= "table" then types.error "Struct table expected" end - for k, field_type in pairs (s_type) do - nest_error ("in struct field "..k, field_type, s_val[k]) - end - end -end - --------------------------------------------------------------------------------- --- Same as __table, except that it's called on literal strings. --------------------------------------------------------------------------------- -function types.__string(s_type) - return function (s_val) - if s_val ~= s_type then - types.error("String %q expected", s_type) - end - end -end - --------------------------------------------------------------------------------- --- Top and Bottom: --------------------------------------------------------------------------------- -function types.any() end -function types.none() types.error "Empty type" end -types.__or = types.union -types.__and = types.inter \ No newline at end of file diff --git a/src/lib/metalua/extension/types.mlua b/src/lib/metalua/extension/types.mlua deleted file mode 100644 index bbcd53a..0000000 --- a/src/lib/metalua/extension/types.mlua +++ /dev/null @@ -1,352 +0,0 @@ --- This extension inserts type-checking code at approriate place in the code, --- thanks to annotations based on "::" keyword: --- --- * function declarations can be annotated with a returned type. 
When they --- are, type-checking code is inserted in each of their return statements, --- to make sure they return the expected type. --- --- * function parameters can also be annotated. If they are, type-checking --- code is inserted in the function body, which checks the arguments' types --- and cause an explicit error upon incorrect calls. Moreover, if a new value --- is assigned to the parameter in the function's body, the new value's type --- is checked before the assignment is performed. --- --- * Local variables can also be annotated. If they are, type-checking --- code is inserted before any value assignment or re-assignment is --- performed on them. --- --- Type checking can be disabled with: --- --- -{stat: types.enabled = false } --- --- Code transformation is performed at the chunk level, i.e. file by --- file. Therefore, it the value of compile-time variable --- [types.enabled] changes in the file, the only value that counts is --- its value once the file is entirely parsed. --- --- Syntax --- ====== --- --- Syntax annotations consist of "::" followed by a type --- specifier. They can appear after a function parameter name, after --- the closing parameter parenthese of a function, or after a local --- variable name in the declaration. See example in samples. --- --- Type specifiers are expressions, in which identifiers are taken --- from table types. For instance, [number] is transformed into --- [types.number]. These [types.xxx] fields must contain functions, --- which generate an error when they receive an argument which doesn't --- belong to the type they represent. It is perfectly acceptible for a --- type-checking function to return another type-checking function, --- thus defining parametric/generic types. Parameters can be --- identifiers (they're then considered as indexes in table [types]) --- or literals. --- --- Design hints --- ============ --- --- This extension uses the code walking library [walk] to globally --- transform the chunk AST. 
See [chunk_transformer()] for details --- about the walker. --- --- During parsing, type informations are stored in string-indexed --- fields, in the AST nodes of tags `Local and `Function. They are --- used by the walker to generate code only if [types.enabled] is --- true. --- --- TODO --- ==== --- --- It's easy to add global vars type-checking, by declaring :: as an --- assignment operator. It's easy to add arbitrary expr --- type-checking, by declaring :: as an infix operator. How to make --- both cohabit? - --------------------------------------------------------------------------------- --- --- Function chunk_transformer() --- --------------------------------------------------------------------------------- --- --- Takes a block annotated with extra fields, describing typing --- constraints, and returns a normal AST where these constraints have --- been turned into type-checking instructions. --- --- It relies on the following annotations: --- --- * [`Local{ }] statements may have a [types] field, which contains a --- id name ==> type name map. --- --- * [Function{ }] expressions may have an [param_types] field, also a --- id name ==> type name map. They may also have a [ret_type] field --- containing the type of the returned value. --- --- Design hints: --- ============= --- --- It relies on the code walking library, and two states: --- --- * [return_types] is a stack of the expected return values types for --- the functions currently in scope, the most deeply nested one --- having the biggest index. --- --- * [scopes] is a stack of id name ==> type name scopes, one per --- currently active variables scope. --- --- What's performed by the walker: --- --- * Assignments to a typed variable involve a type checking of the --- new value; --- --- * Local declarations are checked for additional type declarations. 
--- --- * Blocks create and destroy variable scopes in [scopes] --- --- * Functions create an additional scope (around its body block's scope) --- which retains its argument type associations, and stacks another --- return type (or [false] if no type constraint is given) --- --- * Return statements get the additional type checking statement if --- applicable. --- --------------------------------------------------------------------------------- - --- TODO: unify scopes handling with free variables detector --- FIXME: scopes are currently incorrect anyway, only functions currently define a scope. - -require "metalua.walk" - --{ extension 'match' } - -module("types", package.seeall) - -enabled = true - -local function chunk_transformer (block) - if not enabled then return end - local return_types, scopes = { }, { } - local cfg = { block = { }; stat = { }; expr = { } } - - function cfg.stat.down (x) - match x with - | `Local{ lhs, rhs, types = x_types } -> - -- Add new types declared by lhs in current scope. - local myscope = scopes [#scopes] - for var, type in pairs (x_types) do - myscope [var] = process_type (type) - end - -- Type-check each rhs value with the type of the - -- corresponding lhs declaration, if any. Check backward, in - -- case a local var name is used more than once. 
- for i = 1, max (#lhs, #rhs) do - local type, new_val = myscope[lhs[i][1]], rhs[i] - if type and new_val then - rhs[i] = checktype_builder (type, new_val, 'expr') - end - end - | `Set{ lhs, rhs } -> - for i=1, #lhs do - match lhs[i] with - | `Id{ v } -> - -- Retrieve the type associated with the variable, if any: - local j, type = #scopes, nil - repeat j, type = j-1, scopes[j][v] until type or j==0 - -- If a type constraint is found, apply it: - if type then rhs[i] = checktype_builder(type, rhs[i] or `Nil, 'expr') end - | _ -> -- assignment to a non-variable, pass - end - end - | `Return{ r_val } -> - local r_type = return_types[#return_types] - if r_type then - x <- `Return{ checktype_builder (r_type, r_val, 'expr') } - end - | _ -> -- pass - end - end - - function cfg.expr.down (x) - if x.tag ~= 'Function' then return end - local new_scope = { } - table.insert (scopes, new_scope) - for var, type in pairs (x.param_types or { }) do - new_scope[var] = process_type (type) - end - local r_type = x.ret_type and process_type (x.ret_type) or false - table.insert (return_types, r_type) - end - - ------------------------------------------------------------------- - -- Unregister the returned type and the variable scope in which - -- arguments are registered; - -- then, adds the parameters type checking instructions at the - -- beginning of the function, if applicable. 
- ------------------------------------------------------------------- - function cfg.expr.up (x) - if x.tag ~= 'Function' then return end - -- Unregister stuff going out of scope: - table.remove (return_types) - table.remove (scopes) - -- Add initial type checking: - for v, t in pairs(x.param_types or { }) do - table.insert(x[2], 1, checktype_builder(t, `Id{v}, 'stat')) - end - end - - cfg.block.down = || table.insert (scopes, { }) - cfg.block.up = || table.remove (scopes) - - walk.block(cfg, block) -end - --------------------------------------------------------------------------- --- Perform required transformations to change a raw type expression into --- a callable function: --- --- * identifiers are changed into indexes in [types], unless they're --- allready indexed, or into parentheses; --- --- * literal tables are embedded into a call to types.__table --- --- This transformation is not performed when type checking is disabled: --- types are stored under their raw form in the AST; the transformation is --- only performed when they're put in the stacks (scopes and return_types) --- of the main walker. 
--------------------------------------------------------------------------- -function process_type (type_term) - -- Transform the type: - cfg = { expr = { } } - - function cfg.expr.down(x) - match x with - | `Index{...} | `Paren{...} -> return 'break' - | _ -> -- pass - end - end - function cfg.expr.up (x) - match x with - | `Id{i} -> x <- `Index{ `Id "types", `String{ i } } - | `Table{...} | `String{...} | `Op{...} -> - local xcopy, name = table.shallow_copy(x) - match x.tag with - | 'Table' -> name = '__table' - | 'String' -> name = '__string' - | 'Op' -> name = '__'..x[1] - end - x <- `Call{ `Index{ `Id "types", `String{ name } }, xcopy } - | `Function{ params, { results } } if results.tag=='Return' -> - results.tag = nil - x <- `Call{ +{types.__function}, params, results } - | `Function{...} -> error "malformed function type" - | _ -> -- pass - end - end - walk.expr(cfg, type_term) - return type_term -end - --------------------------------------------------------------------------- --- Insert a type-checking function call on [term] before returning --- [term]'s value. Only legal in an expression context. --------------------------------------------------------------------------- -local non_const_tags = table.transpose - { 'Dots', 'Op', 'Index', 'Call', 'Invoke', 'Table' } -function checktype_builder(type, term, kind) - -- Shove type-checking code into the term to check: - match kind with - | 'expr' if non_const_tags [term.tag] -> - local v = mlp.gensym() - return `Stat{ { `Local{ {v}, {term} }; `Call{ type, v } }, v } - | 'expr' -> - return `Stat{ { `Call{ type, term } }, term } - | 'stat' -> - return `Call{ type, term } - end -end - --------------------------------------------------------------------------- --- Parse the typechecking tests in a function definition, and adds the --- corresponding tests at the beginning of the function's body. 
--------------------------------------------------------------------------- -local function func_val_builder (x) - local typed_params, ret_type, body = unpack(x) - local e = `Function{ { }, body; param_types = { }; ret_type = ret_type } - - -- Build [untyped_params] list, and [e.param_types] dictionary. - for i, y in ipairs (typed_params) do - if y.tag=="Dots" then - assert(i==#typed_params, "`...' must be the last parameter") - break - end - local param, type = unpack(y) - e[1][i] = param - if type then e.param_types[param[1]] = type end - end - return e -end - --------------------------------------------------------------------------- --- Parse ":: type" annotation if next token is "::", or return false. --- Called by function parameters parser --------------------------------------------------------------------------- -local opt_type = gg.onkeyword{ "::", mlp.expr } - --------------------------------------------------------------------------- --- Updated function definition parser, which accepts typed vars as --- parameters. 
--------------------------------------------------------------------------- - --- Parameters parsing: -local id_or_dots = gg.multisequence{ { "...", builder = "Dots" }, default = mlp.id } - --- Function parsing: -mlp.func_val = gg.sequence{ - "(", gg.list{ - gg.sequence{ id_or_dots, opt_type }, terminators = ")", separators = "," }, - ")", opt_type, mlp.block, "end", - builder = func_val_builder } - -mlp.lexer:add { "::", "newtype" } -mlp.chunk.transformers:add (chunk_transformer) - --- Local declarations parsing: -local local_decl_parser = mlp.stat:get "local" [2].default - -local_decl_parser[1].primary = gg.sequence{ mlp.id, opt_type } - -function local_decl_parser.builder(x) - local lhs, rhs = unpack(x) - local s, stypes = `Local{ { }, rhs or { } }, { } - for i = 1, #lhs do - local id, type = unpack(lhs[i]) - s[1][i] = id - if type then stypes[id[1]]=type end - end - if next(stypes) then s.types = stypes end - return s -end - -function newtype_builder(x) - local lhs, rhs = unpack(x) - match lhs with - | `Id{ x } -> t = process_type (rhs) - | `Call{ `Id{ x }, ... } -> - t = `Function{ { }, rhs } - for i = 2, #lhs do - if lhs[i].tag ~= "Id" then error "Invalid newtype parameter" end - t[1][i-1] = lhs[i] - end - | _ -> error "Invalid newtype definition" - end - return `Let{ { `Index{ `Id "types", `String{ x } } }, { t } } -end - -mlp.stat:add{ "newtype", mlp.expr, "=", mlp.expr, builder = newtype_builder } - - --------------------------------------------------------------------------- --- Register as an operator --------------------------------------------------------------------------- ---mlp.expr.infix:add{ "::", prec=100, builder = |a, _, b| insert_test(a,b) } - -return +{ require (-{ `String{ package.metalua_extension_prefix .. 
'types-runtime' } }) } diff --git a/src/lib/metalua/extension/withdo.mlua b/src/lib/metalua/extension/withdo.mlua deleted file mode 100644 index 6b89a64..0000000 --- a/src/lib/metalua/extension/withdo.mlua +++ /dev/null @@ -1,30 +0,0 @@ --- RAII in metalua. --- --- Write: --- with var_1, var_2... = val_1, val_2... do --- ... --- end --- --- will assign val_n to var_n foreach n, and guaranty that var_n:close() will be called, --- no matter what, even if the body causes an error, even if it returns, even --- if another :close() call causes an error, etc. No. Matter. What. - -require 'metalua.extension.trycatch' - -function withdo_builder (x) - local names, vals, body = unpack(x) - for i = #names, 1, -1 do - local name, val = names[i], vals[i] - body = trycatch_builder{ { `Set{ {name}, {val} }, body }, -- try-block - { }, -- catch-block - { +{ print ("closing "..-{`String{name[1]}}) }, - `Invoke{ name, `String "close" } } } - end - table.insert(body, 1, `Local{ names }) - return body -end - -mlp.lexer:add 'with' -mlp.stat:add{ - 'with', mlp.id_list, '=', mlp.expr_list, 'do', mlp.block, 'end', - builder = withdo_builder } diff --git a/src/lib/metalua/extension/xglobal-runtime.lua b/src/lib/metalua/extension/xglobal-runtime.lua deleted file mode 100644 index a2e97df..0000000 --- a/src/lib/metalua/extension/xglobal-runtime.lua +++ /dev/null @@ -1,41 +0,0 @@ -local _G = getfenv() -local _G_mt = getmetatable(_G) - - --- Set the __globals metafield in the global environment's metatable, --- if not already there. -if _G_mt then - if _G_mt.__globals then return else - print( "Warning: _G already has a metatable,".. - " which might interfere with xglobals") - _G_mt.__globals = { } - end -else - _G_mt = { __globals = { } } - setmetatable(_G, _G_mt) -end - --- add a series of variable names to the list of declared globals -function _G_mt.__newglobal(...) 
- local g = _G_mt.__globals - for v in ivalues{...} do g[v]=true end -end - --- Try to set a global that's not in _G: --- if it isn't declared, fail -function _G_mt.__newindex(_G, var, val) - if not _G_mt.__globals[var] then - error ("Setting undeclared global variable "..var) - end - rawset(_G, var, val) -end - --- Try to read a global that's not in _G: --- if it isn't declared, fail -function _G_mt.__index(_G, var) - if not _G_mt.__globals[var] then - error ("Reading undeclared global variable "..var) - end - return nil -end - diff --git a/src/lib/metalua/extension/xglobal.mlua b/src/lib/metalua/extension/xglobal.mlua deleted file mode 100644 index a285a11..0000000 --- a/src/lib/metalua/extension/xglobal.mlua +++ /dev/null @@ -1,20 +0,0 @@ --- WARNING, this is undertested, especially in cases where mutliple --- modules have their own fenvs. Use at your own risks. - -require 'strict' - -local function decl_builder(x) - local ids, vals = unpack(x) - local ids_as_strings = table.imap(|x| `String{x[1]}, ids) - local decl = `Call{ +{getmetatable(getfenv()).__newglobal}, - unpack(ids_as_strings) } - if vals then return { decl, `Set{ ids, vals } } - else return decl end -end - -mlp.lexer:add 'global' -mlp.stat:add{ - 'global', mlp.id_list, gg.onkeyword{ '=', mlp.expr_list }, - builder = decl_builder } - -return +{ require (-{ `String{ package.metalua_extension_prefix .. 
'xglobal-runtime' } }) } diff --git a/src/lib/metalua/extension/xloop.mlua b/src/lib/metalua/extension/xloop.mlua deleted file mode 100644 index b8b20e6..0000000 --- a/src/lib/metalua/extension/xloop.mlua +++ /dev/null @@ -1,100 +0,0 @@ --{ extension 'match' } --{ extension 'log' } - -require 'metalua.walk' - ----------------------------------------------------------------------- --- Back-end: ----------------------------------------------------------------------- - --- Parse additional elements in a loop -loop_element = gg.multisequence{ - { 'while', mlp.expr, builder = |x| `Until{ `Op{ 'not', x[1] } } }, - { 'until', mlp.expr, builder = |x| `Until{ x[1] } }, - { 'if', mlp.expr, builder = |x| `If{ x[1] } }, - { 'unless', mlp.expr, builder = |x| `If{ `Op{ 'not', x[1] } } }, - { 'for', mlp.for_header, builder = |x| x[1] } } - --- Recompose the loop -function xloop_builder(x) - local first, elements, body = unpack(x) - - ------------------------------------------------------------------- - -- If it's a regular loop, don't bloat the code - ------------------------------------------------------------------- - if not next(elements) then - table.insert(first, body) - return first - end - - ------------------------------------------------------------------- - -- There's no reason to treat the first element in a special way - ------------------------------------------------------------------- - table.insert(elements, 1, first) - - ------------------------------------------------------------------- - -- if a header or a break must be able to exit the loops, ti will - -- set exit_label and use it (a regular break wouldn't be enough, - -- as it couldn't escape several nested loops.) 
- ------------------------------------------------------------------- - local exit_label - local function exit() - if not exit_label then exit_label = mlp.gensym 'break' [1] end - return `Goto{ exit_label } - end - - ------------------------------------------------------------------- - -- Compile all headers elements, from last to first - ------------------------------------------------------------------- - for i = #elements, 1, -1 do - local e = elements[i] - match e with - | `If{ cond } -> - body = `If{ cond, {body} } - | `Until{ cond } -> - body = +{stat: if -{cond} then -{exit()} else -{body} end } - | `Forin{ ... } | `Fornum{ ... } -> - table.insert (e, {body}); body=e - end - end - - ------------------------------------------------------------------- - -- Change breaks into gotos that escape all loops at once. - ------------------------------------------------------------------- - local cfg = { stat = { }, expr = { } } - function cfg.stat.down(x) - match x with - | `Break -> x <- exit() - | `Forin{ ... } | `Fornum{ ... } | `While{ ... } | `Repeat{ ... 
} -> - return 'break' - | _ -> -- pass - end - end - function cfg.expr.down(x) if x.tag=='Function' then return 'break' end end - walk.stat(cfg, body) - - if exit_label then body = { body, `Label{ exit_label } } end - return body -end - ----------------------------------------------------------------------- --- Front-end: ----------------------------------------------------------------------- - -mlp.lexer:add 'unless' -mlp.stat:del 'for' -mlp.stat:del 'while' - -loop_element_list = gg.list{ loop_element, terminators='do' } - -mlp.stat:add{ - 'for', mlp.for_header, loop_element_list, 'do', mlp.block, 'end', - builder = xloop_builder } - -mlp.stat:add{ - 'while', mlp.expr, loop_element_list, 'do', mlp.block, 'end', - builder = |x| xloop_builder{ `While{x[1]}, x[2], x[3] } } - -mlp.stat:add{ - 'unless', mlp.expr, 'then', mlp.block, 'end', - builder = |x| +{stat: if not -{x[1]} then -{x[2]} end} } diff --git a/src/lib/metalua/metaloop.mlua b/src/lib/metalua/metaloop.mlua deleted file mode 100644 index 88b59a8..0000000 --- a/src/lib/metalua/metaloop.mlua +++ /dev/null @@ -1,76 +0,0 @@ -require 'metalua.compiler' - -module ('metaloop', package.seeall) - -PRINT_AST = true -LINE_WIDTH = 60 -PROMPT = "M> " -PROMPT2 = ">> " - -do -- set readline() to a line reader, either editline otr a default - local status, _ = pcall(require, 'editline') - if status then - local rl_handle = editline.init 'metalua' - readline = |p| rl_handle:read(p) - else - function readline (p) - io.write (p) - io.flush () - return io.read '*l' - end - end -end - -function reached_eof(lx, msg) - return lx:peek().tag=='Eof' or msg:find "token `Eof" -end - -printf ("Metalua, interactive REPLoop.\n".. 
- "(c) 2006-2008 ") - -function run() - local lines = { } - while true do - local src, lx, ast, f, results, success - repeat - local line = readline(next(lines) and PROMPT2 or PROMPT) - if not line then print(); os.exit(0) end -- line==nil iff eof on stdin - if not next(lines) then - line = line:gsub('^%s*=', 'return ') - end - table.insert(lines, line) - src = table.concat (lines, "\n") - until #line>0 - - lx = mlc.lexstream_of_luastring(src) - success, ast = pcall(mlc.ast_of_lexstream, lx) - if success then - success, f = pcall(mlc.function_of_ast, ast, '=stdin') - if success then - results = { pcall(f) } - success = table.remove (results, 1) - if success then - -- Success! - table.iforeach(|x| table.print(x, LINE_WIDTH), results) - lines = { } - else - print "Evaluation error:" - print (results[1]) - lines = { } - end - else - print "Can't compile into bytecode:" - print (f) - lines = { } - end - else - -- If lx has been read entirely, try to read another - -- line before failing. - if not reached_eof(lx, ast) then - print "Can't compile source into AST:" - print (ast) - lines = { } - end - end - end -end \ No newline at end of file diff --git a/src/lib/metalua/mlc_xcall.lua b/src/lib/metalua/mlc_xcall.lua deleted file mode 100644 index 8af05f3..0000000 --- a/src/lib/metalua/mlc_xcall.lua +++ /dev/null @@ -1,119 +0,0 @@ --------------------------------------------------------------------------------- --- Execute an `mlc.ast_of_*()' in a separate lua process. --- Communication between processes goes through temporary files, --- for the sake of portability. --------------------------------------------------------------------------------- - -mlc_xcall = { } - --------------------------------------------------------------------------------- --- Number of lines to remove at the end of a traceback, should it be --- dumped due to a compilation error in metabugs mode. 
--------------------------------------------------------------------------------- -local STACK_LINES_TO_CUT = 7 - --------------------------------------------------------------------------------- --- (Not intended to be called directly by users) --- --- This is the back-end function, called in a separate lua process --- by `mlc_xcall.client_*()' through `os.execute()'. --- * inputs: --- * the name of a lua source file to compile in a separate process --- * the name of a writable file where the resulting ast is dumped --- with `serialize()'. --- * metabugs: if true and an error occurs during compilation, --- the compiler's stacktrace is printed, allowing meta-programs --- debugging. --- * results: --- * an exit status of 0 or -1, depending on whethet compilation --- succeeded; --- * the ast file filled will either the serialized ast, or the --- error message. --------------------------------------------------------------------------------- -function mlc_xcall.server (luafilename, astfilename, metabugs) - - -- We don't want these to be loaded when people only do client-side business - require 'metalua.compiler' - require 'serialize' - - mlc.metabugs = metabugs - - -- compile the content of luafile name in an AST, serialized in astfilename - --local status, ast = pcall (mlc.luafile_to_ast, luafilename) - local status, ast - local function compile() return mlc.luafile_to_ast (luafilename) end - if mlc.metabugs then - print 'mlc_xcall.server/metabugs' - --status, ast = xpcall (compile, debug.traceback) - --status, ast = xpcall (compile, debug.traceback) - local function tb(msg) - local r = debug.traceback(msg) - - -- Cut superfluous end lines - local line_re = '\n[^\n]*' - local re = "^(.-)" .. (line_re) :rep (STACK_LINES_TO_CUT) .. 
"$" - return r :strmatch (re) or r - end - --status, ast = xpcall (compile, debug.traceback) - status, ast = xpcall (compile, tb) - else status, ast = pcall (compile) end - local out = io.open (astfilename, 'w') - if status then -- success - out:write (serialize (ast)) - out:close () - os.exit (0) - else -- failure, `ast' is actually the error message - out:write (ast) - out:close () - os.exit (-1) - end -end - --------------------------------------------------------------------------------- --- Compile the file whose name is passed as argument, in a separate process, --- communicating through a temporary file. --- returns: --- * true or false, indicating whether the compilation succeeded --- * the ast, or the error message. --------------------------------------------------------------------------------- -function mlc_xcall.client_file (luafile) - - -- printf("\n\nmlc_xcall.client_file(%q)\n\n", luafile) - - local tmpfilename = os.tmpname() - local cmd = string.format ( - [=[lua -l metalua.mlc_xcall -e "mlc_xcall.server([[%s]], [[%s]], %s)"]=], - luafile, tmpfilename, mlc.metabugs and "true" or "false") - - -- printf("os.execute [[%s]]\n\n", cmd) - - local status = (0 == os.execute (cmd)) - local result -- ast or error msg - if status then - result = (lua_loadfile or loadfile) (tmpfilename) () - else - local f = io.open (tmpfilename) - result = f :read '*a' - f :close() - end - os.remove(tmpfilename) - return status, result -end - --------------------------------------------------------------------------------- --- Compile a source string into an ast, by dumping it in a tmp --- file then calling `mlc_xcall.client_file()'. --- returns: the same as `mlc_xcall.client_file()'. 
--------------------------------------------------------------------------------- -function mlc_xcall.client_literal (luasrc) - local srcfilename = os.tmpname() - local srcfile, msg = io.open (srcfilename, 'w') - if not srcfile then print(msg) end - srcfile :write (luasrc) - srcfile :close () - local status, ast = mlc_xcall.client_file (srcfilename) - os.remove(srcfilename) - return status, ast -end - -return mlc_xcall \ No newline at end of file diff --git a/src/lib/metalua/package2.lua b/src/lib/metalua/package2.lua deleted file mode 100644 index 78912ac..0000000 --- a/src/lib/metalua/package2.lua +++ /dev/null @@ -1,106 +0,0 @@ -local package = package - -require 'metalua.mlc' - -package.metalua_extension_prefix = 'metalua.extension.' - -package.mpath = os.getenv 'LUA_MPATH' or - './?.mlua;/usr/local/share/lua/5.1/?.mlua;'.. - '/usr/local/share/lua/5.1/?/init.mlua;'.. - '/usr/local/lib/lua/5.1/?.mlua;'.. - '/usr/local/lib/lua/5.1/?/init.mlua' - - ----------------------------------------------------------------------- --- resc(k) returns "%"..k if it's a special regular expression char, --- or just k if it's normal. ----------------------------------------------------------------------- -local regexp_magic = table.transpose{ - "^", "$", "(", ")", "%", ".", "[", "]", "*", "+", "-", "?" } -local function resc(k) - return regexp_magic[k] and '%'..k or k -end - ----------------------------------------------------------------------- --- Take a Lua module name, return the open file and its name, --- or and an error message. 
----------------------------------------------------------------------- -function package.findfile(name, path_string) - local config_regexp = ("([^\n])\n"):rep(5):sub(1, -2) - local dir_sep, path_sep, path_mark, execdir, igmark = - package.config:strmatch (config_regexp) - name = name:gsub ('%.', dir_sep) - local errors = { } - local path_pattern = string.format('[^%s]+', resc(path_sep)) - for path in path_string:gmatch (path_pattern) do - --printf('path = %s, rpath_mark=%s, name=%s', path, resc(path_mark), name) - local filename = path:gsub (resc (path_mark), name) - --printf('filename = %s', filename) - local file = io.open (filename, 'r') - if file then return file, filename end - table.insert(errors, string.format("\tno lua file %q", filename)) - end - return false, table.concat(errors, "\n")..'\n' -end - ----------------------------------------------------------------------- --- Execute a metalua module sources compilation in a separate process --- Sending back the bytecode directly is difficult, as some shells --- (at least MS-Windows') interpret some characters. So rather than --- base64-encoding the bytecode, AST is returned from the child --- process, and converted to bytecode then function in the calling --- process. ----------------------------------------------------------------------- -local function spring_load(filename) - -- FIXME: handle compilation errors - local pattern = - [=[lua -l metalua.compiler -l serialize -e ]=].. - [=["print(serialize(mlc.ast_of_luafile [[%s]]))"]=] - local cmd = string.format (pattern, filename) - --print ("running command: ``" .. cmd .. "''") - local fd = io.popen (cmd) - local ast_src = fd:read '*a' - fd:close() - local ast = lua_loadstring (ast_src) () -- much faster than loadstring() - return mlc.function_of_ast (ast, filename) -end - ----------------------------------------------------------------------- --- Load a metalua source file. 
----------------------------------------------------------------------- -function package.metalua_loader (name) - local file, filename_or_msg = package.findfile (name, package.mpath) - if not file then return filename_or_msg end - if package.metalua_nopopen then - local luastring = file:read '*a' - file:close() - return mlc.function_of_luastring (luastring, name) - else - file:close() - require 'metalua.mlc_xcall' - local status, ast = mlc_xcall.client_file (filename_or_msg) - return mlc.function_of_ast(ast) - end -end - ----------------------------------------------------------------------- --- Placed after lua/luac loader, so precompiled files have --- higher precedence. ----------------------------------------------------------------------- -table.insert(package.loaders, package.metalua_loader) - ----------------------------------------------------------------------- --- Load an extension. ----------------------------------------------------------------------- -function extension (name, noruntime) - local complete_name = package.metalua_extension_prefix..name - local x = require (complete_name) - if x==true then return - elseif type(x) ~= 'table' then - error ("extension returned %s instead of an AST", type(x)) - else - return x - end -end - -return package diff --git a/src/lib/metalua/runtime.lua b/src/lib/metalua/runtime.lua deleted file mode 100644 index 5fb0cbb..0000000 --- a/src/lib/metalua/runtime.lua +++ /dev/null @@ -1,3 +0,0 @@ -require 'metalua.base' -require 'metalua.table2' -require 'metalua.string2' diff --git a/src/lib/metalua/string2.lua b/src/lib/metalua/string2.lua deleted file mode 100644 index 60c186d..0000000 --- a/src/lib/metalua/string2.lua +++ /dev/null @@ -1,44 +0,0 @@ - ----------------------------------------------------------------------- ----------------------------------------------------------------------- --- --- String module extension --- ----------------------------------------------------------------------- 
----------------------------------------------------------------------- - --- Courtesy of lua-users.org -function string.split(str, pat) - local t = {} - local fpat = "(.-)" .. pat - local last_end = 1 - local s, e, cap = string.find(str, fpat, 1) - while s do - if s ~= 1 or cap ~= "" then - table.insert(t,cap) - end - last_end = e+1 - s, e, cap = string.find(str, fpat, last_end) - end - if last_end <= string.len(str) then - cap = string.sub(str, last_end) - table.insert(t, cap) - end - return t -end - --- "match" is regularly used as a keyword for pattern matching, --- so here is an always available substitute. -string.strmatch = string["match"] - --- change a compiled string into a function -function string.undump(str) - if str:strmatch '^\027LuaQ' or str:strmatch '^#![^\n]+\n\027LuaQ' then - local f = (lua_loadstring or loadstring)(str) - return f - else - error "Not a chunk dump" - end -end - -return string \ No newline at end of file diff --git a/src/lib/metalua/table2.lua b/src/lib/metalua/table2.lua deleted file mode 100644 index 1db3544..0000000 --- a/src/lib/metalua/table2.lua +++ /dev/null @@ -1,380 +0,0 @@ ---------------------------------------------------------------------- ----------------------------------------------------------------------- --- --- Table module extension --- ----------------------------------------------------------------------- ----------------------------------------------------------------------- - --- todo: table.scan (scan1?) fold1? flip? - -function table.transpose(t) - local tt = { } - for a, b in pairs(t) do tt[b] = a end - return tt -end - -function table.iforeach(f, ...) - -- assert (type (f) == "function") [wouldn't allow metamethod __call] - local nargs = select("#", ...) - if nargs==1 then -- Quick iforeach (most common case), just one table arg - local t = ... 
- assert (type (t) == "table") - for i = 1, #t do - local result = f (t[i]) - -- If the function returns non-false, stop iteration - if result then return result end - end - else -- advanced case: boundaries and/or multiple tables - - -- fargs: arguments fot a single call to f - -- first, last: indexes of the first & last elements mapped in each table - -- arg1: index of the first table in args - - -- 1 - find boundaries if any - local args, fargs, first, last, arg1 = {...}, { } - if type(args[1]) ~= "number" then first, arg1 = 1, 1 -- no boundary - elseif type(args[2]) ~= "number" then first, last, arg1 = 1, args[1], 2 - else first, last, arg1 = args[1], args[2], 3 end - assert (nargs >= arg1) -- at least one table - -- 2 - determine upper boundary if not given - if not last then for i = arg1, nargs do - assert (type (args[i]) == "table") - last = max (#args[i], last) - end end - -- 3 - remove non-table arguments from args, adjust nargs - if arg1>1 then args = { select(arg1, unpack(args)) }; nargs = #args end - - -- 4 - perform the iteration - for i = first, last do - for j = 1, nargs do fargs[j] = args[j][i] end -- build args list - local result = f (unpack (fargs)) -- here is the call - -- If the function returns non-false, stop iteration - if result then return result end - end - end -end - -function table.imap (f, ...) - local result, idx = { }, 1 - local function g(...) result[idx] = f(...); idx=idx+1 end - table.iforeach(g, ...) - return result -end - -function table.ifold (f, acc, ...) - local function g(...) acc = f (acc,...) end - table.iforeach (g, ...) - return acc -end - --- function table.ifold1 (f, ...) --- return table.ifold (f, acc, 2, false, ...) --- end - -function table.izip(...) - local function g(...) return {...} end - return table.imap(g, ...) -end - -function table.ifilter(f, t) - local yes, no = { }, { } - for i=1,#t do table.insert (f(t[i]) and yes or no, t[i]) end - return yes, no -end - -function table.icat(...) 
- local result = { } - for t in values {...} do - for x in values (t) do - table.insert (result, x) - end - end - return result -end - -function table.iflatten (x) return table.icat (unpack (x)) end - -function table.irev (t) - local result, nt = { }, #t - for i=0, nt-1 do result[nt-i] = t[i+1] end - return result -end - -function table.isub (t, ...) - local ti, u = table.insert, { } - local args, nargs = {...}, select("#", ...) - for i=1, nargs/2 do - local a, b = args[2*i-1], args[2*i] - for i=a, b, a<=b and 1 or -1 do ti(u, t[i]) end - end - return u -end - -function table.iall (f, ...) - local result = true - local function g(...) return not f(...) end - return not table.iforeach(g, ...) - --return result -end - -function table.iany (f, ...) - local function g(...) return not f(...) end - return not table.iall(g, ...) -end - -function table.shallow_copy(x) - local y={ } - for k, v in pairs(x) do y[k]=v end - return y -end - --- Warning, this is implementation dependent: it relies on --- the fact the [next()] enumerates the array-part before the hash-part. -function table.cat(...) 
- local y={ } - for x in values{...} do - -- cat array-part - for _, v in ipairs(x) do table.insert(y,v) end - -- cat hash-part - local lx, k = #x - if lx>0 then k=next(x,lx) else k=next(x) end - while k do y[k]=x[k]; k=next(x,k) end - end - return y -end - -function table.deep_copy(x) - local tracker = { } - local function aux (x) - if type(x) == "table" then - local y=tracker[x] - if y then return y end - y = { }; tracker[x] = y - setmetatable (y, getmetatable (x)) - for k,v in pairs(x) do y[aux(k)] = aux(v) end - return y - else return x end - end - return aux(x) -end - -function table.override(dst, src) - for k, v in pairs(src) do dst[k] = v end - for i = #src+1, #dst do dst[i] = nil end - return dst -end - - -function table.range(a,b,c) - if not b then assert(not(c)); b=a; a=1 - elseif not c then c = (b>=a) and 1 or -1 end - local result = { } - for i=a, b, c do table.insert(result, i) end - return result -end - --- FIXME: new_indent seems to be always nil?! --- FIXME: accumulator function should be configurable, --- so that print() doesn't need to bufferize the whole string --- before starting to print. -function table.tostring(t, ...) - local PRINT_HASH, HANDLE_TAG, FIX_INDENT, LINE_MAX, INITIAL_INDENT = true, true - for _, x in ipairs {...} do - if type(x) == "number" then - if not LINE_MAX then LINE_MAX = x - else INITIAL_INDENT = x end - elseif x=="nohash" then PRINT_HASH = false - elseif x=="notag" then HANDLE_TAG = false - else - local n = string['match'](x, "^indent%s*(%d*)$") - if n then FIX_INDENT = tonumber(n) or 3 end - end - end - LINE_MAX = LINE_MAX or math.huge - INITIAL_INDENT = INITIAL_INDENT or 1 - - local current_offset = 0 -- indentation level - local xlen_cache = { } -- cached results for xlen() - local acc_list = { } -- Generated bits of string - local function acc(...) 
-- Accumulate a bit of string - local x = table.concat{...} - current_offset = current_offset + #x - table.insert(acc_list, x) - end - local function valid_id(x) - -- FIXME: we should also reject keywords; but the list of - -- current keywords is not fixed in metalua... - return type(x) == "string" - and string['match'](x, "^[a-zA-Z_][a-zA-Z0-9_]*$") - end - - -- Compute the number of chars it would require to display the table - -- on a single line. Helps to decide whether some carriage returns are - -- required. Since the size of each sub-table is required many times, - -- it's cached in [xlen_cache]. - local xlen_type = { } - local function xlen(x, nested) - nested = nested or { } - if x==nil then return #"nil" end - --if nested[x] then return #tostring(x) end -- already done in table - local len = xlen_cache[x] - if len then return len end - local f = xlen_type[type(x)] - if not f then return #tostring(x) end - len = f (x, nested) - xlen_cache[x] = len - return len - end - - -- optim: no need to compute lengths if I'm not going to use them - -- anyway. - if LINE_MAX == math.huge then xlen = function() return 0 end end - - xlen_type["nil"] = function () return 3 end - function xlen_type.number (x) return #tostring(x) end - function xlen_type.boolean (x) return x and 4 or 5 end - function xlen_type.string (x) return #string.format("%q",x) end - function xlen_type.table (adt, nested) - - -- Circular references detection - if nested [adt] then return #tostring(adt) end - nested [adt] = true - - local has_tag = HANDLE_TAG and valid_id(adt.tag) - local alen = #adt - local has_arr = alen>0 - local has_hash = false - local x = 0 - - if PRINT_HASH then - -- first pass: count hash-part - for k, v in pairs(adt) do - if k=="tag" and has_tag then - -- this is the tag -> do nothing! - elseif type(k)=="number" and k<=alen and math.fmod(k,1)==0 then - -- array-part pair -> do nothing! 
- else - has_hash = true - if valid_id(k) then x=x+#k - else x = x + xlen (k, nested) + 2 end -- count surrounding brackets - x = x + xlen (v, nested) + 5 -- count " = " and ", " - end - end - end - - for i = 1, alen do x = x + xlen (adt[i], nested) + 2 end -- count ", " - - nested[adt] = false -- No more nested calls - - if not (has_tag or has_arr or has_hash) then return 3 end - if has_tag then x=x+#adt.tag+1 end - if not (has_arr or has_hash) then return x end - if not has_hash and alen==1 and type(adt[1])~="table" then - return x-2 -- substract extraneous ", " - end - return x+2 -- count "{ " and " }", substract extraneous ", " - end - - -- Recursively print a (sub) table at given indentation level. - -- [newline] indicates whether newlines should be inserted. - local function rec (adt, nested, indent) - if not FIX_INDENT then indent = current_offset end - local function acc_newline() - acc ("\n"); acc (string.rep (" ", indent)) - current_offset = indent - end - local x = { } - x["nil"] = function() acc "nil" end - function x.number() acc (tostring (adt)) end - --function x.string() acc (string.format ("%q", adt)) end - function x.string() acc ((string.format ("%q", adt):gsub("\\\n", "\\n"))) end - function x.boolean() acc (adt and "true" or "false") end - function x.table() - if nested[adt] then acc(tostring(adt)); return end - nested[adt] = true - - - local has_tag = HANDLE_TAG and valid_id(adt.tag) - local alen = #adt - local has_arr = alen>0 - local has_hash = false - - if has_tag then acc("`"); acc(adt.tag) end - - -- First pass: handle hash-part - if PRINT_HASH then - for k, v in pairs(adt) do - -- pass if the key belongs to the array-part or is the "tag" field - if not (k=="tag" and HANDLE_TAG) and - not (type(k)=="number" and k<=alen and math.fmod(k,1)==0) then - - -- Is it the first time we parse a hash pair? 
- if not has_hash then - acc "{ " - if not FIX_INDENT then indent = current_offset end - else acc ", " end - - -- Determine whether a newline is required - local is_id, expected_len = valid_id(k) - if is_id then expected_len = #k + xlen (v, nested) + #" = , " - else expected_len = xlen (k, nested) + - xlen (v, nested) + #"[] = , " end - if has_hash and expected_len + current_offset > LINE_MAX - then acc_newline() end - - -- Print the key - if is_id then acc(k); acc " = " - else acc "["; rec (k, nested, indent+(FIX_INDENT or 0)); acc "] = " end - - -- Print the value - rec (v, nested, indent+(FIX_INDENT or 0)) - has_hash = true - end - end - end - - -- Now we know whether there's a hash-part, an array-part, and a tag. - -- Tag and hash-part are already printed if they're present. - if not has_tag and not has_hash and not has_arr then acc "{ }"; - elseif has_tag and not has_hash and not has_arr then -- nothing, tag already in acc - else - assert (has_hash or has_arr) - local no_brace = false - if has_hash and has_arr then acc ", " - elseif has_tag and not has_hash and alen==1 and type(adt[1])~="table" then - -- No brace required; don't print "{", remember not to print "}" - acc (" "); rec (adt[1], nested, indent+(FIX_INDENT or 0)) - no_brace = true - elseif not has_hash then - -- Braces required, but not opened by hash-part handler yet - acc "{ " - if not FIX_INDENT then indent = current_offset end - end - - -- 2nd pass: array-part - if not no_brace and has_arr then - rec (adt[1], nested, indent+(FIX_INDENT or 0)) - for i=2, alen do - acc ", "; - if current_offset + xlen (adt[i], { }) > LINE_MAX - then acc_newline() end - rec (adt[i], nested, indent+(FIX_INDENT or 0)) - end - end - if not no_brace then acc " }" end - end - nested[adt] = false -- No more nested calls - end - local y = x[type(adt)] - if y then y() else acc(tostring(adt)) end - end - --printf("INITIAL_INDENT = %i", INITIAL_INDENT) - current_offset = INITIAL_INDENT or 0 - rec(t, { }, 0) - return 
table.concat (acc_list) -end - -function table.print(...) return print(table.tostring(...)) end - -return table \ No newline at end of file diff --git a/src/lib/metalua/walk.mlua b/src/lib/metalua/walk.mlua deleted file mode 100644 index c94a04e..0000000 --- a/src/lib/metalua/walk.mlua +++ /dev/null @@ -1,304 +0,0 @@ --------------------------------------------------------------------------------- --- Code walkers --- "Make everything as simple as possible, but not simpler". --- --- This library offers a generic way to write AST transforming --- functions. Macros can take bits of AST as parameters and generate a --- more complex AST with them; but modifying an AST a posteriori is --- much more difficult; typical tasks requiring code walking are --- transformation such as lazy evaluation or Continuation Passing --- Style, but more mundane operations are required in more macros than --- one would thing, such as "transform all returns which aren't inside --- a nested function into an error throwing". --- --- AST walking is an intrinsically advanced operation, and the --- interface of this library, although it tries to remain as simple as --- possible, is not trivial. You'll probably need to write a couple of --- walkers with it before feeling comfortable. --- --- --- We deal here with 3 important kinds of AST: statements, expressions --- and blocks. Code walkers for these three kinds for AST are called --- [walk.stat (cfg, ast)], [walk.expr (cfg, ast)] and [walk.block --- (cfg, ast)] respectively. the [cfg] parameter describes what shall --- happen as the AST is traversed by the walker, and [ast] is the tree --- itself. --- --- An aparte to fellow functional programmers: although Lua has --- got all the features that constitute a functional language, its --- heart, and in particular it table data, is imperative. It's often --- asking for trouble to work against the host language's nature, so --- code walkers are imperative, cope with it. 
Or use table.deep_copy() --- if you don't want issues with shared state. --- --- Since walkers are imperative (i.e. they transform the tree in --- place, rather than returning a fresh variant of it), you'll often --- want to override a node, i.e. keep its "pointer identity", but --- replace its content with a new one; this is done by --- table.override(), and is conveniently abbreviated as --- "target <- new_content". --- --- So, [cfg] can contain a series of sub-tables fields 'expr', 'stat', --- 'block'. each of them can contain a function up() and/or a function --- down(). --- --- * down() is called when the walker starts visiting a node of the --- matching kind, i.e. before any of its sub-nodes have been --- visited. down() is allowed to return either the string "break", --- which means "don't go further down this tree, don't try to walk --- its children", or nil, i.e. "please process with the children --- nodes". --- --- There are two reasons why you might want down() to return --- "break": either because you really weren't interested into the --- children nodes,or because you wanted to walk through them in a --- special way, and down() already performed this special walking. --- --- * up() is called just before the node is left, i.e. after all of --- its children nodes have been completely parsed, down and up. This --- is a good place to put treatments which rely on sub-nodes being --- already treated. Notice that if down() returned 'break', up() is --- run immediately after. --- --- In previous versions of this library, there were plenty of fancy --- configurable ways to decide whether an up() or down() functions --- would be triggered or not. Experience suggested that the best way --- is to keep it simpler, as done by the current design: the functions --- in sub-table expr are run on each expression node, and ditto for --- stat and block; the user is expected to use the pattern matching --- extension to decide whether to act or not on a given node. 
--- --- Advanced features --- ================= --- --- The version above is a strict subset of the truth: there are a --- couple of other, more advanced features in the library. --- --- Paths in visitor functions --- -------------------------- --- First, up() and down() don't take only one node as a parameter, but --- a series thereof: all the nested expr/stat/block nodes on the way --- up to the ast's root. For instance, when a walker works on --- +{ foo(bar*2+1) } an is on the node +{2}, up() and down() are called --- with arguments (+{bar*2}, +{bar*2+1}, +{foo(bar*2+1)}). --- --- `Call and `Invoke as statements --- ------------------------------- --- `Call and `Invoke are normally expressions, but they can also --- appear as statements. In this case, the cfg.expr.xxx() visitors --- aren't called on them. Sometimes you want to consider tham as --- expressions, sometimes not, and it's much easier to add a special --- case in cfg.stat.xxx() visitors than to determine whether we're in --- a statament's context in cfg.expr.xxx(), --- --- Extra walkers --- ------------- --- There are some second class walkers: walk.expr_list() and walk.guess(). --- --- * The first one walks through a list of expressions. Although used --- internally by the other walkers, it remains a second class --- citizen: the list it works on won't appear in the path of nested --- ASTs that's passed to up() and down(). This design choice has --- been made because there's no clear definition of what is or isn't --- an expr list in an AST, and anyway such lists are probably not --- part of metacoders' mental image of an AST, so it's been thought --- best to let people pretend they don't exist. --- --- * walk.guess() tries to guess the type of the AST it receives, --- according to its tag, and runs the appropriate walker. Node which --- can be both stats and exprs (`Call and `Invoke) are considered as --- expr. 
--- --- These three walkers, although used internally by the other walkers, --- remain second class citizens: the lists they work on won't appear --- in the path of nested ASTs that's passed to up() and down(). --- --- Tag dictionaries --- ---------------- --- There are two public dictionaries, walk.tags.stat and --- walk.tags.expr, which keep the set of all tags that can start a --- statement or an expression AST. They're used by walk.guess, and --- users sometimes need them as well, so they've been kept available. --- --- Binder visitor --- -------------- --- Finally, there's one last field in [cfg]: binder(). This function --- is called on identifiers in a binder position, i.e. `Id{ } nodes --- which create a scoped local variable, in `Function, `Fornum, `Local --- etc. The main use case for that function is to keep track of --- variables, captures, etc. and perform alpha conversions. In many --- cases that work is best done through the library 'walk.id', which --- understands the notions of scope, free variable, bound variable --- etc. --- --- Binder visitors are called just before the variable's scope starts, --- e.g. they're called after the right-hand-side has been visited in a --- `Local node, but before in a `Localrec node. --- --- TODO: document scopes, relaxed cfg descriptions --- ----------------------------------------------- --- --- Examples of cfg structures: --- --- { Id = f1, Local = f2 } --- f --- { up = f1, down = f2 } --- { scope = { up = f1, down = f2 }, up = f1, down = f2 } --- { stat = f1, expr = { up = f1 } } --- --- --------------------------------------------------------------------------------- - --{ extension "match" } - -walk = { traverse = { }; tags = { }; debug = false } - --------------------------------------------------------------------------------- --- Standard tags: can be used to guess the type of an AST, or to check --- that the type of an AST is respected. 
--------------------------------------------------------------------------------- -walk.tags.stat = table.transpose{ - 'Do', 'Set', 'While', 'Repeat', 'Local', 'Localrec', 'Return', - 'Fornum', 'Forin', 'If', 'Break', 'Goto', 'Label', - 'Call', 'Invoke' } -walk.tags.expr = table.transpose{ - 'Paren', 'Call', 'Invoke', 'Index', 'Op', 'Function', 'Stat', - 'Table', 'Nil', 'Dots', 'True', 'False', 'Number', 'String', 'Id' } - -local function scope (cfg, dir) - local h = cfg.scope and cfg.scope[dir] - if h then h() end -end - --------------------------------------------------------------------------------- --- These [walk.traverse.xxx()] functions are in charge of actually going through --- ASTs. At each node, they make sure to call the appropriate walker. --------------------------------------------------------------------------------- -function walk.traverse.stat (cfg, x, ...) - if walk.debug then printf("traverse stat %s", table.tostring(x)) end - local log = {...} - local B = |y| walk.block (cfg, y, x, unpack(log)) - local S = |y| walk.stat (cfg, y, x, unpack(log)) - local E = |y| walk.expr (cfg, y, x, unpack(log)) - local EL = |y| walk.expr_list (cfg, y, x, unpack(log)) - local I = |y| walk.binder_list (cfg, y, x, unpack(log)) - local function BS(y) - scope (cfg, 'down'); B(y); scope (cfg, 'up') - end - - match x with - | {...} if x.tag == nil -> for y in ivalues(x) do walk.stat(cfg, y, ...) 
end - -- no tag --> node not inserted in the history log - | `Do{...} -> BS(x) - | `Set{ lhs, rhs } -> EL(lhs); EL(rhs) - | `While{ cond, body } -> E(cond); BS(body) - | `Repeat{ body, cond } -> scope(cfg, 'down'); B(body); E(cond); scope(cfg, 'up') - | `Local{ lhs } -> I(lhs) - | `Local{ lhs, rhs } -> EL(rhs); I(lhs) - | `Localrec{ lhs, rhs } -> I(lhs); EL(rhs) - | `Fornum{ i, a, b, body } -> E(a); E(b); I{i}; BS(body) - | `Fornum{ i, a, b, c, body } -> E(a); E(b); E(c); I{i}; BS(body) - | `Forin{ i, rhs, body } -> EL(rhs); I(i); BS(body) - | `If{...} -> for i=1, #x-1, 2 do E(x[i]); BS(x[i+1]) end - if #x%2 == 1 then BS(x[#x]) end - | `Call{...}|`Invoke{...}|`Return{...} -> EL(x) - | `Break | `Goto{ _ } | `Label{ _ } -> -- nothing - | { tag=tag, ...} if walk.tags.stat[tag]-> - walk.malformed (cfg, x, unpack (log)) - | _ -> - walk.unknonw (cfg, x, unpack (log)) - end -end - -function walk.traverse.expr (cfg, x, ...) - if walk.debug then printf("traverse expr %s", table.tostring(x)) end - local log = {...} - local B = |y| walk.block (cfg, y, x, unpack(log)) - local S = |y| walk.stat (cfg, y, x, unpack(log)) - local E = |y| walk.expr (cfg, y, x, unpack(log)) - local EL = |y| walk.expr_list (cfg, y, x, unpack(log)) - local I = |y| walk.binder_list (cfg, y, x, unpack(log)) - match x with - | `Paren{ e } -> E(e) - | `Call{...} | `Invoke{...} -> EL(x) - | `Index{ a, b } -> E(a); E(b) - | `Op{ opid, ... } -> E(x[2]); if #x==3 then E(x[3]) end - | `Function{ params, body } -> I(params); scope(cfg, 'down'); B(body); scope (cfg, 'in') - | `Stat{ b, e } -> scope(cfg, 'down'); B(b); E(e); scope (cfg, 'in') - | `Table{ ... } -> - for i = 1, #x do match x[i] with - | `Pair{ k, v } -> E(k); E(v) - | v -> E(v) - end end - |`Nil|`Dots|`True|`False|`Number{_}|`String{_}|`Id{_} -> -- nothing - | { tag=tag, ...} if walk.tags.expr[tag]-> - walk.malformed (cfg, x, unpack (log)) - | _ -> - walk.unknonw (cfg, x, unpack (log)) - end -end - -function walk.traverse.block (cfg, x, ...) 
- assert(type(x)=='table', "traverse.block() expects a table") - for y in ivalues(x) do walk.stat(cfg, y, x, ...) end -end - -function walk.traverse.expr_list (cfg, x, ...) - assert(type(x)=='table', "traverse.expr_list() expects a table") - -- x doesn't appear in the log - for y in ivalues(x) do walk.expr(cfg, y, ...) end -end - ----------------------------------------------------------------------- --- Generic walker generator. --- * if `cfg' has an entry matching the tree name, use this entry --- * if not, try to use the entry whose name matched the ast kind --- * if an entry is a table, look for 'up' and 'down' entries --- * if it is a function, consider it as a `down' traverser. ----------------------------------------------------------------------- -local walker_builder = |cfg_field, traverse| function (cfg, x, ...) - local sub_cfg = type (x)=='table' and x.tag and cfg[x.tag] - or cfg[cfg_field] or cfg - local broken, down, up = false - if type(sub_cfg)=='table' then - down, up = sub_cfg.down, sub_cfg.up - elseif type(sub_cfg)=='function' or sub_cfg=='break' then - down, up = sub_cfg, nil - else error "Invalid walk config" end - - if down then - if down=='break' then broken='break' - else broken = down (x, ...) end - assert(not broken or broken=='break', - "Map functions must return 'break' or nil") - end - if not broken and traverse then traverse (cfg, x, ...) end - if up then up (x, ...) end -end - ----------------------------------------------------------------------- --- Declare [walk.stat], [walk.expr], [walk.block] and [walk.expr_list] ----------------------------------------------------------------------- -for w in values{ "stat", "expr", "block", "expr_list", - "malformed", "unknown" } do - walk[w] = walker_builder (w, walk.traverse[w]) -end - ----------------------------------------------------------------------- --- Walk a list of `Id{...} (mainly a helper function actually). 
----------------------------------------------------------------------- -function walk.binder_list (cfg, x, ...) - local f = cfg.binder - if f then for v in ivalues(x) do f(v, ...) end end -end - ----------------------------------------------------------------------- --- Tries to guess the type of the AST then choose the right walkker. ----------------------------------------------------------------------- -function walk.guess (cfg, x, ...) - assert(type(x)=='table', "arg #2 in a walker must be an AST") - if walk.tags.expr[x.tag] then return walk.expr(cfg, x, ...) end - if walk.tags.stat[x.tag] then return walk.stat(cfg, x, ...) end - if not x.tag then return walk.block(cfg, x, ...) end - error ("Can't guess the AST type from tag "..(x.tag or '')) -end diff --git a/src/lib/metalua/walk/bindings.mlua b/src/lib/metalua/walk/bindings.mlua deleted file mode 100644 index 3827507..0000000 --- a/src/lib/metalua/walk/bindings.mlua +++ /dev/null @@ -1,43 +0,0 @@ -require 'metalua.walk' -require 'metalua.walk.scope' - -function bindings(ast) - -- binders :: ast => name => occurences - -- unbound :: name => occurences - -- scope :: name => ast - - local binders, unbound, cfg, scope = { }, { }, { scope={ } }, scope:new() - - -- * id: identifier entering in scope - -- * ast: statement or expr carrying this id, on of: - -- Local, Localrec, Forin, Fornum, Function. 
- function cfg.binder (id, ast) - if id.tag ~= 'Id' then return end - local id_name = id[1] - -- Reference in scope, so that the binding statement can be retrieved: - scope.current[id_name] = ast - -- Init the occurences list for this identifier: - if binders[ast] then binders[ast][id_name] = { } - else binders[ast] = { [id_name] = { } } end - end - - -- identifier occurence, not as a binder: reference this occurence - function cfg.Id (id) - local id_name = id[1] - -- ast which binds this id, might be nil: - local binder_ast = scope.current [id_name] - -- dict id_name => occurences, might be the list of unbound occurences: - local occur_dict = binder_ast and binders[binder_ast] or unbound - -- add an occurence of `id' in the occurences list: - local occurences = occur_dict [id_name] - if occurences then table.insert (occurences, id) - else occur_dict [id_name] = { id } end - end - - function cfg.scope.down() scope:push() end - function cfg.scope.up() scope:pop() end - - walk.guess (cfg, ast) - return binders, unbound -end - diff --git a/src/lib/metalua/walk/id.mlua b/src/lib/metalua/walk/id.mlua deleted file mode 100644 index 5e457e8..0000000 --- a/src/lib/metalua/walk/id.mlua +++ /dev/null @@ -1,186 +0,0 @@ --------------------------------------------------------------------------------- --- --- This library walks AST to gather information about the identifiers --- in it. It classifies them between free variables and bound --- variables, and keeps track of which AST node created a given bound --- variable occurence. --- --- walk_id (kind, ast) --- --- Input: --- * an AST kind: 'expr', 'stat', 'block', 'expr_list', 'binder_list', 'guess' --- * an AST of the corresponding kind. --- --- > string, AST --- --- Output: a table with two fields, 'bound' and 'free'; --- * free associates the name of each free variable with the list of --- all its occurences in the AST. That list is never empty. 
--- * bound associates each stat or expr binding a new variable with --- the occurences of that/those new variable(s). --- --- > { free = table (string, AST and `Id{ }); --- > bound = table (AST, table(AST and `Id{ })) } --- --- How it works --- ============ --- Walk the tree to: --- * locate open variables, and keep pointers on them so that they can --- be alpha converted. --- * locate variable bindings, so that we can find bound variables --- * locate bound variables, keep them in association with their --- binder, again in order to alpha-convert them. --- --- Special treatments: --- * `Function `Local `Localrec `Fornum `Forin have binders; --- `Local takes effect from the next statement, --- `Localrec from the current statement, --- `Function and other statments inside their bodies. --- * `Repeat has a special scoping rule for its condition. --- * blocks create temporary scopes --- * `Splice must stop the walking, so that user code won't be --- converted --- --------------------------------------------------------------------------------- - --{ extension 'match' } --{ extension 'log' } - -require 'metalua.walk' -require 'metalua.walk.scope' - --- variable lists auto-create empty list as values by default. -local varlist_mt = { __index = function (self, key) - local x={ }; self[key] = x; return x - end } - -local function _walk_id (kind, supercfg, ast, ...) - - assert(walk[kind], "Inbalid AST kind selector") - assert(type(supercfg=='table'), "Config table expected") - assert(type(ast)=='table', "AST expected") - - local cfg = { expr = { }; block = { }; stat = { } } - local scope = scope:new() - - local visit_bound_var, visit_free_var - if not supercfg.id then - printf("Warning, you're using the id walker without id visitor. ".. - "If you know what you want do to, then you're probably doing ".. 
- "something else...") - visit_bound_var = || nil - visit_free_var = || nil - else - visit_free_var = supercfg.id.free or || nil - visit_bound_var = supercfg.id.bound or || nil - end - - ----------------------------------------------------------------------------- - -- Check identifiers; add functions parameters to scope - ----------------------------------------------------------------------------- - function cfg.expr.down(x, ...) - -- Execute the generic expression walker; if it breaks. - -- don't do the id walking. - if supercfg.expr and supercfg.expr.down then - local r = supercfg.expr.down(x, ...) - if r then return r end - end - local parents = {...} - match x with - | `Id{ name } -> - local binder, r = scope.current[name] -- binder :: ast which bound var - if binder then - --$log( 'walk.id found a bound var:', x, binder) - r = visit_bound_var(x, binder, unpack(parents)) - else - --$log( 'walk.id found a free var:', x, scope.current) - r = visit_free_var(x, unpack(parents)) - end - if r then return r end - | `Function{ params, _ } -> scope:push (params, x) - | `Stat{ block, expr } -> - ------------------------------------------------------------- - -- 'expr' is in the scope of 'block': create the scope and - -- walk the block 'manually', then prevent automatic walk - -- by returning 'break'. - ------------------------------------------------------------- - scope:push() - for stat in values (block) do walk.stat(cfg, stat, x, ...) end - walk.expr(cfg, expr, x, unpack(parents)) - scope:pop() - return 'break' - | _ -> -- pass - end - - end - - ----------------------------------------------------------------------------- - -- Close the function scope opened by 'down()' - ----------------------------------------------------------------------------- - function cfg.expr.up(x, ...) - match x with `Function{...} -> scope:pop() | _ -> end - if supercfg.expr and supercfg.expr.up then supercfg.expr.up(x, ...) 
end - end - - ----------------------------------------------------------------------------- - -- Create a new scope and register loop variable[s] in it - ----------------------------------------------------------------------------- - function cfg.stat.down(x, ...) - -- Execute the generic statement walker; if it breaks. - -- don't do the id walking. - if supercfg.stat and supercfg.stat.down then - local r = supercfg.stat.down(x, ...) - if r then return r end - end - match x with - | `Forin{ vars, ... } -> scope:push (vars, x) - | `Fornum{ var, ... } -> scope:push ({var}, x) - | `Localrec{ vars, ... } -> scope:add (vars, x) - | `Repeat{ block, expr } -> - ------------------------------------------------------------- - -- 'expr' is in the scope of 'block': create the scope and - -- walk the block 'manually', then prevent automatic walk - -- by returning 'break'. - ------------------------------------------------------------- - scope:push() - for stat in values (block) do walk.stat(cfg, stat, x, ...) end - walk.expr(cfg, expr, x, ...) - scope:pop() - return 'break' - | _ -> -- pass - end - end - - ----------------------------------------------------------------------------- - -- Close the scopes opened by 'up()' - ----------------------------------------------------------------------------- - function cfg.stat.up(x, ...) - match x with - | `Forin{ ... } | `Fornum{ ... } -> scope:pop() - | `Local{ vars, ... } -> scope:add(vars, x) - | _ -> -- pass - -- `Repeat has no up(), because it 'break's. - end - if supercfg.stat and supercfg.stat.up then supercfg.stat.up(x, ...) end - end - - ----------------------------------------------------------------------------- - -- Create a separate scope for each block - ----------------------------------------------------------------------------- - function cfg.block.down(x, ...) - if supercfg.block and supercfg.block.down then - local r = supercfg.block.down(x, ...) 
- if r then return r end - end - scope:push() - end - function cfg.block.up(x, ...) - scope:pop() - if supercfg.block and supercfg.block.up then supercfg.block.up(x, ...) end - end - cfg.binder = supercfg.binder - walk[kind](cfg, ast, ...) -end - -local mt = { __index = |_,k| |...| _walk_id(k, ...) } -walk_id = setmetatable({ }, mt) diff --git a/src/lib/metalua/walk/scope.lua b/src/lib/metalua/walk/scope.lua deleted file mode 100644 index 795f8d1..0000000 --- a/src/lib/metalua/walk/scope.lua +++ /dev/null @@ -1,54 +0,0 @@ --------------------------------------------------------------------------------- --- --- Scopes: this library helps keeping track of identifier scopes, --- typically in code walkers. --- --- * scope:new() returns a new scope instance s --- --- * s:push() bookmarks the current set of variables, so the it can be --- retrieved next time a s:pop() is performed. --- --- * s:pop() retrieves the last state saved by s:push(). Calls to --- :push() and :pop() can be nested as deep as one wants. --- --- * s:add(var_list, val) adds new variable names (stirng) into the --- scope, as keys. val is the (optional) value associated with them: --- it allows to attach arbitrary information to variables, e.g. the --- statement or expression that created them. --- --- * s:push(var_list, val) is a shortcut for --- s:push(); s:add(var_list, val). --- --- * s.current is the current scope, a table with variable names as --- keys and their associated value val (or 'true') as value. --- --------------------------------------------------------------------------------- - -scope = { } -scope.__index = scope - -function scope:new() - local ret = { current = { } } - ret.stack = { ret.current } - setmetatable (ret, self) - return ret -end - -function scope:push(...) - table.insert (self.stack, table.shallow_copy (self.current)) - if ... then return self:add(...) 
end -end - -function scope:pop() - self.current = table.remove (self.stack) -end - -function scope:add (vars, val) - val = val or true - for i, id in ipairs (vars) do - assert(id.tag=='Id' or id.tag=='Dots' and i==#vars) - if id.tag=='Id' then self.current[id[1]] = val end - end -end - -return scope \ No newline at end of file diff --git a/src/lib/serialize.lua b/src/lib/serialize.lua deleted file mode 100644 index 659a6d7..0000000 --- a/src/lib/serialize.lua +++ /dev/null @@ -1,193 +0,0 @@ --------------------------------------------------------------------------------- --- Metalua --- Summary: Table-to-source serializer --------------------------------------------------------------------------------- --- --- Copyright (c) 2008-2009, Fabien Fleutot . --- --- This software is released under the MIT Licence, see licence.txt --- for details. --- --------------------------------------------------------------------------------- --- --- Serialize an object into a source code string. This string, when passed as --- an argument to loadstring()(), returns an object structurally identical --- to the original one. --- --- The following are supported: --- --- * strings, numbers, booleans, nil --- --- * functions without upvalues --- --- * tables thereof. There is no restriction on keys; recursive and shared --- sub-tables are handled correctly. --- --- Caveat: metatables and environments aren't saved; this might or might not --- be what you want. 
--------------------------------------------------------------------------------- - -local no_identity = { number=1, boolean=1, string=1, ['nil']=1 } - -function serialize (x) - - local gensym_max = 0 -- index of the gensym() symbol generator - local seen_once = { } -- element->true set of elements seen exactly once in the table - local multiple = { } -- element->varname set of elements seen more than once - local nested = { } -- transient, set of elements currently being traversed - local nest_points = { } - local nest_patches = { } - - -- Generate fresh indexes to store new sub-tables: - local function gensym() - gensym_max = gensym_max + 1 ; return gensym_max - end - - ----------------------------------------------------------------------------- - -- `nest_points' are places where a (recursive) table appears within - -- itself, directly or not. for instance, all of these chunks - -- create nest points in table `x': - -- - -- "x = { }; x[x] = 1" - -- "x = { }; x[1] = x" - -- "x = { }; x[1] = { y = { x } }". - -- - -- To handle those, two tables are created by `mark_nest_point()': - -- - -- * `nest_points [parent]' associates all keys and values in table - -- parent which create a nest_point with boolean `true' - -- - -- * `nest_patches' contains a list of `{ parent, key, value }' - -- tuples creating a nest point. They're all dumped after all the - -- other table operations have been performed. - -- - -- `mark_nest_point (p, k, v)' fills tables `nest_points' and - -- `nest_patches' with informations required to remember that - -- key/value `(k,v)' creates a nest point in parent table `p'. It - -- also marks `p' as occuring multiple times, since several - -- references to it will be required in order to patch the nest - -- points. 
- ----------------------------------------------------------------------------- - local function mark_nest_point (parent, k, v) - local nk, nv = nested[k], nested[v] - assert (not nk or seen_once[k] or multiple[k]) - assert (not nv or seen_once[v] or multiple[v]) - local mode = (nk and nv and "kv") or (nk and "k") or ("v") - local parent_np = nest_points [parent] - local pair = { k, v } - if not parent_np then parent_np = { }; nest_points [parent] = parent_np end - parent_np [k], parent_np [v] = nk, nv - table.insert (nest_patches, { parent, k, v }) - seen_once [parent], multiple [parent] = nil, true - end - - ----------------------------------------------------------------------------- - -- 1st pass, list the tables and functions which appear more than once in `x' - ----------------------------------------------------------------------------- - local function mark_multiple_occurences (x) - if no_identity [type(x)] then return end - if seen_once [x] then seen_once [x], multiple [x] = nil, true - elseif multiple [x] then -- pass - else seen_once [x] = true end - - if type (x) == 'table' then - nested [x] = true - for k, v in pairs (x) do - if nested[k] or nested[v] then mark_nest_point (x, k, v) else - mark_multiple_occurences (k) - mark_multiple_occurences (v) - end - end - nested [x] = nil - end - end - - local dumped = { } -- multiply occuring values already dumped in localdefs - local localdefs = { } -- already dumped local definitions as source code lines - - - -- mutually recursive functions: - local dump_val, dump_or_ref_val - - ------------------------------------------------------------------------------ - -- if `x' occurs multiple times, dump the local var rather than the - -- value. If it's the first time it's dumped, also dump the content - -- in localdefs. 
- ------------------------------------------------------------------------------ - function dump_or_ref_val (x) - if nested[x] then return 'false' end -- placeholder for recursive reference - if not multiple[x] then return dump_val (x) end - local var = dumped [x] - if var then return "_[" .. var .. "]" end -- already referenced - local val = dump_val(x) -- first occurence, create and register reference - var = gensym() - table.insert(localdefs, "_["..var.."]="..val) - dumped [x] = var - return "_[" .. var .. "]" - end - - ----------------------------------------------------------------------------- - -- 2nd pass, dump the object; subparts occuring multiple times are dumped - -- in local variables, which can then be referenced multiple times; - -- care is taken to dump local vars in an order which repect dependencies. - ----------------------------------------------------------------------------- - function dump_val(x) - local t = type(x) - if x==nil then return 'nil' - elseif t=="number" then return tostring(x) - elseif t=="string" then return string.format("%q", x) - elseif t=="boolean" then return x and "true" or "false" - elseif t=="function" then - return string.format ("loadstring(%q,'@serialized')", string.dump (x)) - elseif t=="table" then - - local acc = { } - local idx_dumped = { } - local np = nest_points [x] - for i, v in ipairs(x) do - if np and np[v] then - table.insert (acc, 'false') -- placeholder - else - table.insert (acc, dump_or_ref_val(v)) - end - idx_dumped[i] = true - end - for k, v in pairs(x) do - if np and (np[k] or np[v]) then - --check_multiple(k); check_multiple(v) -- force dumps in localdefs - elseif not idx_dumped[k] then - table.insert (acc, "[" .. dump_or_ref_val(k) .. "] = " .. dump_or_ref_val(v)) - end - end - return "{ "..table.concat(acc,", ").." 
}" - else - error ("Can't serialize data of type "..t) - end - end - - -- Patch the recursive table entries: - local function dump_nest_patches() - for _, entry in ipairs(nest_patches) do - local p, k, v = unpack (entry) - assert (multiple[p]) - local set = dump_or_ref_val (p) .. "[" .. dump_or_ref_val (k) .. "] = " .. - dump_or_ref_val (v) .. " -- rec " - table.insert (localdefs, set) - end - end - - mark_multiple_occurences (x) - local toplevel = dump_or_ref_val (x) - dump_nest_patches() - - if next (localdefs) then - -- Dump local vars containing shared or recursive parts, - -- then the main table using them. - return "local _={ }\n" .. - table.concat (localdefs, "\n") .. - "\nreturn " .. toplevel - else - -- No shared part, straightforward dump: - return "return " .. toplevel - end -end diff --git a/src/lib/strict.lua b/src/lib/strict.lua deleted file mode 100644 index 772fc10..0000000 --- a/src/lib/strict.lua +++ /dev/null @@ -1,45 +0,0 @@ --- --- strict.lua --- checks uses of undeclared global variables --- All global variables must be 'declared' through a regular assignment --- (even assigning nil will do) in a main chunk before being used --- anywhere or assigned to inside a function. --- - -local getinfo, error, rawset, rawget = debug.getinfo, error, rawset, rawget - -local mt = getmetatable(_G) -if mt == nil then - mt = {} - setmetatable(_G, mt) -end - -__strict = true -mt.__declared = {} - -local function what () - local d = getinfo(3, "S") - return d and d.what or "C" -end - -mt.__newindex = function (t, n, v) - if __strict and not mt.__declared[n] then - local w = what() - if w ~= "main" and w ~= "C" then - error("assign to undeclared variable '"..n.."'", 2) - end - mt.__declared[n] = true - end - rawset(t, n, v) -end - -mt.__index = function (t, n) - if __strict and not mt.__declared[n] and what() ~= "C" then - error("variable '"..n.."' is not declared", 2) - end - return rawget(t, n) -end - -function global(...) 
- for _, v in ipairs{...} do mt.__declared[v] = true end -end diff --git a/src/lib/verbose_require.lua b/src/lib/verbose_require.lua deleted file mode 100644 index 185def8..0000000 --- a/src/lib/verbose_require.lua +++ /dev/null @@ -1,11 +0,0 @@ -do - local xrequire, n, ind = require, 0, "| " - function require (x) - print(ind:rep(n).."/ require: "..x) - n=n+1 - local y = xrequire(x) - n=n-1 - print(ind:rep(n).."\\_"); - return y - end -end diff --git a/src/make.bat b/src/make.bat deleted file mode 100644 index d4e9ca6..0000000 --- a/src/make.bat +++ /dev/null @@ -1,58 +0,0 @@ -@CLS - -@REM *** Settings *** - -@REM BASE = root directory of metalua sources -@REM DISTRIB_BIN = metalua executables target directory -@REM DISTRIB_LIB = metalua libraries target directory, can be an existing path referenced in LUA_PATH -@REM LUA, LUAC = Lua executables, provided by metalua by default. - -@REM --- BEGINNING OF USER-EDITABLE PART --- - -@set BASE=%CD% -@set DISTRIB=%BASE%\..\distrib -@set DISTRIB_BIN=%DISTRIB%\bin -@set DISTRIB_LIB=%DISTRIB%\lib -@set LUA=%DISTRIB_BIN%\lua -@set LUAC=%DISTRIB_BIN%\luac - -@REM --- END OF USER-EDITABLE PART --- - - -@REM *** Create the distribution directories, populate them with lib sources *** - -@set LUA_PATH=?.luac;?.lua;%DISTRIB_LIB%\?.luac;%DISTRIB_LIB%\?.lua -@set LUA_MPATH=?.mlua;%DISTRIB_LIB%\?.mlua - -mkdir %DISTRIB% -mkdir %DISTRIB_BIN% -mkdir %DISTRIB_LIB% -xcopy /y /s lib %DISTRIB_LIB% -xcopy /y /s bin %DISTRIB_BIN% - -@REM *** Generate a callable batch metalua.bat script *** - -echo @set LUA_PATH=?.luac;?.lua;%DISTRIB_LIB%\?.luac;%DISTRIB_LIB%\?.lua > %DISTRIB_BIN%\metalua.bat -echo @set LUA_MPATH=?.mlua;%DISTRIB_LIB%\?.mlua >> %DISTRIB_BIN%\metalua.bat -echo @%LUA% %DISTRIB_LIB%\metalua.luac %%* >> %DISTRIB_BIN%\metalua.bat - - -@REM *** Compiling the parts of the compiler written in plain Lua *** - -cd compiler -%LUAC% -o %DISTRIB_LIB%\metalua\bytecode.luac lopcodes.lua lcode.lua ldump.lua compile.lua -%LUAC% -o 
%DISTRIB_LIB%\metalua\mlp.luac lexer.lua gg.lua mlp_lexer.lua mlp_misc.lua mlp_table.lua mlp_meta.lua mlp_expr.lua mlp_stat.lua mlp_ext.lua -cd .. - -@REM *** Bootstrap the parts of the compiler written in metalua *** - -%LUA% %BASE%\build-utils\bootstrap.lua %BASE%\compiler\mlc.mlua output=%DISTRIB_LIB%\metalua\mlc.luac -%LUA% %BASE%\build-utils\bootstrap.lua %BASE%\compiler\metalua.mlua output=%DISTRIB_LIB%\metalua.luac - -@REM *** Finish the bootstrap: recompile the metalua parts of the compiler with itself *** - -call %DISTRIB_BIN%\metalua -vb -f compiler\mlc.mlua -o %DISTRIB_LIB%\metalua\mlc.luac -call %DISTRIB_BIN%\metalua -vb -f compiler\metalua.mlua -o %DISTRIB_LIB%\metalua.luac - -@REM *** Precompile metalua libraries *** -%LUA% %BASE%\build-utils\precompile.lua directory=%DISTRIB_LIB% command=%DISTRIB_BIN%\metalua diff --git a/src/make.sh b/src/make.sh deleted file mode 100755 index 188af3d..0000000 --- a/src/make.sh +++ /dev/null @@ -1,128 +0,0 @@ -#! /bin/sh - -# --- BEGINNING OF USER-EDITABLE PART --- - -# Metalua sources -BASE=${PWD} - -# Temporary building location. -# Upon installation, everything will be moved to ${INSTALL_LIB} and ${INSTALL_BIN} - -if [ -z "${BUILD}" ]; then - BUILD=$(mkdir -p ../build; cd ../build; pwd) -fi - -if [ -z "${BUILD_BIN}" ]; then - BUILD_BIN=${BUILD}/bin -fi - -if [ -z "${BUILD_LIB}" ]; then - BUILD_LIB=${BUILD}/lib -fi - -# Where to place the final results -# DESTDIR= -# INSTALL_BIN=/usr/local/bin -# INSTALL_LIB=/usr/local/lib/lua/5.1 -if [ -z "${INSTALL_BIN}" ]; then - INSTALL_BIN=~/local/bin -fi - -if [ -z "${INSTALL_LIB}" ]; then - INSTALL_LIB=~/local/lib/lua -fi - -# Where to find Lua executables. 
-# On many Debian-based systems, those can be installed with "sudo apt-get install lua5.1" -LUA=$(which lua) -LUAC=$(which luac) - -# --- END OF USER-EDITABLE PART --- - -if [ -z ${LUA} ] ; then echo "Error: no lua interpreter found"; fi -if [ -z ${LUAC} ] ; then echo "Error: no lua compiler found"; fi - -if [ -f ~/.metaluabuildrc ] ; then . ~/.metaluabuildrc; fi - -if [ -z "$LINEREADER" ] && which -s rlwrap; then LINEREADER=rlwrap; fi - -echo '*** Lua paths setup ***' - -export LUA_PATH="?.luac;?.lua;${BUILD_LIB}/?.luac;${BUILD_LIB}/?.lua" -export LUA_MPATH="?.mlua;${BUILD_LIB}/?.mlua" - -echo '*** Create the distribution directories, populate them with lib sources ***' - -mkdir -p ${BUILD_BIN} -mkdir -p ${BUILD_LIB} -cp -Rp lib/* ${BUILD_LIB}/ -# cp -Rp bin/* ${BUILD_BIN}/ # No binaries provided for unix (for now) - -echo '*** Generate a callable metalua shell script ***' - -cat > ${BUILD_BIN}/metalua < make-install.sh < ${DESTDIR}${INSTALL_BIN}/metalua < local foo = if bar then 1 else 2 --- --- Indeed, an expression is expected at the right of the equal, and "if ..." is --- a statement, which expects nested statements as "then" and "else" clauses. --- The example above must therefore be written: --- --- > local foo --- > if bar then foo=1 else foo=2 end --- --- --- Let's allow if-then-[elseif*]-[else] constructs to be used in an expression's --- context. In such a context, 'then' and 'else' are expected to be followed by --- expressions, not statement blocks. --- --- Stuff you probably need to understand, at least summarily, to follow this --- code: --- * Lua syntax --- * the fact that -{ ... } switches metalua into compile time mode --- * mlp, the dynamically extensible metalua parser, which will be extended with --- the new construct at compile time. --- * gg, the grammar generator that allows to build and extend parsers, and with --- which mlp is implemented. 
--- * the fact that code can be interchangeably represented as abstract syntax --- trees with `Foo{ bar } notations (easy to manipulate) or as quotes inside a --- +{ ... } (easy to read). --- ----------------------------------------------------------------------------------- - - ----------------------------------------------------------------------------------- --- How to turn this file in a proper syntax extension. --- =================================================== --- --- To turn this example's metalevel 0 code into a regular extension: --- * Put everything inside the -{block: ... } in a separate .mlua file; --- * save it in a directory called 'extension', which is itself --- in your $LUA_MPATH. For instance, if your $LUA_MPATH contains --- '~/local/metalua/?.mlua', you can save it as --- '~/local/metalua/extension-compiler/ifexpr.mlua' --- * Load the extension with "-{ extension 'ifexpr' }", whenever you want to --- use it. ----------------------------------------------------------------------------------- - --{ block: -- Enter metalevel 0, where we'll start hacking the parser. - - ------------------------------------------------------------------------------- - -- Most extension implementations are cut in two parts: a front-end which - -- parses the syntax into some custom tree, and a back-end which turns that - -- tree into a compilable AST. Since the front-end calls the back-end, the - -- later has to be declared first. - ------------------------------------------------------------------------------- - - ------------------------------------------------------------------------------- - -- Back-end: - -- ========= - -- This is the builder that turns the parser's result into an expression AST. - -- Local vars: - -- ----------- - -- elseifthen_list : list of { condition, expression_if_true } pairs, - -- opt_else: either the expression in the 'else' final clause if any, - -- or false if there's no else clause. 
- -- v: the variable in which the result will be stored. - -- ifstat: the if-then-else statement that will be generated from - -- then if-then-else expression, then embedded in a `Stat{} - -- - -- The builder simply turns all expressions into blocks, so that they fit in - -- a regular if-then-else statement. Then the resulting if-then-else is - -- embedded in a `Stat{ } node, so that it can be placed where an expression - -- is expected. - -- - -- The variable in which the result is stored has its name generated by - -- mlp.gensym(). This way we're sure there will be no variable capture. - -- When macro hygiene problems are more complex, it's generally a good - -- idea to give a look at the extension 'H'. - ------------------------------------------------------------------------------- - local function builder (x) - local elseifthen_list, opt_else = unpack (x) - - local v = mlp.gensym 'ife' -- the selected expr will be stored in this var. - local ifstat = `If{ } - for y in ivalues (elseifthen_list) do - local cond, val = unpack (y) - table.insert (ifstat, cond) - table.insert (ifstat, { `Set{ {v}, {val} } }) -- change expr into stat. - end - if opt_else then -- the same for else clause, except that there's no cond. - table.insert (ifstat, { `Set{ {v}, {opt_else} } }) - end - return `Stat{ +{block: local -{v}; -{ifstat}}, v } - end - - ------------------------------------------------------------------------------- - -- Front-end: - -- ========== - -- This is mostly the same as the regular if-then-else parser, except that: - -- * it's added to the expression parser, not the statement parser; - -- * blocks after conditions are replaced by exprs; - -- - -- In Lua, 'end' traditionally terminates a block, not an - -- expression. Should there be a 'end' to respect if-then-else - -- usual syntax, or should there be none, to respect usual implicit - -- expression ending? I chose not to put an 'end' here, but other people - -- might have other tastes... 
- ------------------------------------------------------------------------------- - mlp.expr:add{ name = 'if-expression', - 'if', - gg.list { gg.sequence{mlp.expr, "then", mlp.expr}, separators="elseif" }, - gg.onkeyword{ 'else', mlp.expr }, - builder = builder } - -} -- Back to metalevel 1, with the new feature enabled - -local foo, bar - ------------------------------------------------------------- --- The parser will read this as: --- { { { `Id 'foo', `Number 1 }, --- { `Id 'bar', `Number 2 } }, --- `Number 3 }, --- then feed it to 'builder', which will turn it into an AST ------------------------------------------------------------- - -local x = if false then 1 elseif bar then 2 else 3 - ------------------------------------------------------------- --- The result after builder will be: --- `Stat{ +{block: local $v$ --- if foo then $v$ = 1 --- elseif bar then $v$ = 2 --- else $v$ = 3 --- end }, `Id "$v$" } ------------------------------------------------------------- - -assert (x == 3) -print "It seems to work..." \ No newline at end of file diff --git a/src/samples/lex_switch_test.mlua b/src/samples/lex_switch_test.mlua deleted file mode 100644 index 84b615c..0000000 --- a/src/samples/lex_switch_test.mlua +++ /dev/null @@ -1,40 +0,0 @@ --- This is a simple and somewhat stupid example of how to switch --- lexers dynamically. Behind a V, X and Y are the only reserved --- keywords. In normal conditions, X and Y aren't keywords and can be --- used as variables. 
- --{ block: - require 'lexer' - local my_lexer = lexer.lexer:clone() -- no keywords - my_lexer:add{"X", "Y"} - mlp.lexer:add "V" - - function num(lx) - local a = lx:next() - assert(a.tag=='Number') - return a - end - - my_parser = gg.list{ - gg.multisequence{ - { "X", num, builder = |x| `Table{ x[1], +{0} } }, - { "Y", num, builder = |y| `Table{ +{0}, y[1] } }, - default = gg.sequence{ mlp.id, builder = |x| `Pair{ `String{x[1][1]},`True } } }, - separators = { ',', ';' }, - builder = function(l) l.tag='Table'; return l end } - - mlp.expr:add{ "V", gg.with_lexer(my_lexer, my_parser), builder = unpack } } - --- Use the special lexer: -foo = V X 1, Y 2, X 3, - for, foo, in, tag, function -- check that these aren't keywords in my_lexer - --- Use X and Y as Id, in the unpolluted lexer: -print "Vector:" -X = table.tostring(foo, 60) -print (X) - -print "Sum:" -- Ready for a functional one-liner? :) -Y = |v| table.ifold (|a,b| table.imap (|c,d| c+d, a, b), {0,0}, v) -table.print (Y(foo)) - diff --git a/src/samples/match_test.mlua b/src/samples/match_test.mlua deleted file mode 100644 index 1d743b7..0000000 --- a/src/samples/match_test.mlua +++ /dev/null @@ -1,86 +0,0 @@ --{extension 'match'} - -WIDTH = 50 -function p(msg) - io.write(msg, ' ':rep(WIDTH-#msg)) - io.flush() -end - -p "Basic match" -match 1 with 1 -> print 'ok' end - -p "Sequence match" -match 3, 4 with -| 1, 2 -> print 'KO' -| 3, 4 -> print 'ok' -end - -p "Id binding" -match 3, 4 with -| 1, 2 -> print 'KO' -| x, y -> print 'ok' -end - -p "Table destructuring & non-litteral tested term" -match {1, 2} with -|{a, 2} -> assert(a==1); print 'ok' -end - -p "Pattern group" -match {'?'} with -|1|2|3 -> print 'KO' -|{...} -> print 'ok' -end - -p "Multi-level destructuring" -match {{1000}} with -|{{2000}} -> print 'KO' -|{{3000}} -> print 'KO' -|{{1000}} -> print 'ok' -end - -p "Guard" -match 1 with -| 1 if false -> print 'KO' -| 1 -> print 'ok' -end - -p "Guard with bound var" -match 1 with -| a if a ~= 1 -> print 
'KO' -| a if a == 1 -> print 'ok' -end - -p "Non linear var & destructuring" -match {1, {2}} with -| {a, {a}} -> print 'KO' -| {a, {b}} -> print 'ok' -end - -p "Non-linear vars on a sequence" -match 1, 2 with -| a, a -> print 'KO' -| a, b -> print 'ok' -end - -p "Multiple _ wildcards" -match 1, 2 with -| _, _ -> print 'ok' -| a, b -> print 'KO' -end - -p "Regexp & non-linear vars" -match 'toto' with -| 't(.)t(.)' / { a, a } -> print (a..'k') -end - -p "Nested match & ..." -match { { 'o', 'k', '!' } } with -| { t } -> match t with - | { a, b } -> print 'KO' - | { a, b, ... } -> print (a..b) - | _ -> print 'KO' - end -| _ -> print 'KO' -end - diff --git a/src/samples/metalint/INSTALL.TXT b/src/samples/metalint/INSTALL.TXT deleted file mode 100644 index 600fdff..0000000 --- a/src/samples/metalint/INSTALL.TXT +++ /dev/null @@ -1,22 +0,0 @@ -Metalint 0.1 - INSTALL.TXT -========================== - -Metalint is a regular Metalua program, and relies on Metalua compilation -libraries. You must therefore have a working Metalua installation on your -system. You can run it with: "metalua metalint.mlua -- ". -For instance, to check metalint itself: - - ~/src/metalua/src/sandbox$ metalua metalint.mlua -- metalint.mlua - File metalint.mlua checked successfully - ~/src/metalua/src/sandbox$ - -You can also precompile it: - - ~/src/metalua/src/sandbox$ metalua metalint.mlua -s '#!/usr/bin/env lua' -o metalint - ~/src/metalua/src/sandbox$ ./metalint lint.mlua - File lint.mlua checked successfully - ~/src/metalua/src/sandbox$ - -Beware that even when precompiled, it still requires the Metalua runtime libs in LUA_PATH. - -Don't forget to set the LUA_DPATH environment variable! 
\ No newline at end of file diff --git a/src/samples/metalint/LICENCE.TXT b/src/samples/metalint/LICENCE.TXT deleted file mode 100644 index 41937b8..0000000 --- a/src/samples/metalint/LICENCE.TXT +++ /dev/null @@ -1,27 +0,0 @@ -Metalint - -Copyright (c) 2006-2008 Fabien Fleutot - -Metalint is available under the MIT licence. - -MIT License -=========== - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src/samples/metalint/README.TXT b/src/samples/metalint/README.TXT deleted file mode 100644 index 2a6f160..0000000 --- a/src/samples/metalint/README.TXT +++ /dev/null @@ -1,159 +0,0 @@ -Metalint 0.2 - README.TXT -========================= - -Metalint is a utility that checks Lua and Metalua source files for global -variables usage. Beyond checking toplevel global variables, it also checks -fields in modules: for instance, it will catch typos such as taable.insert(), -both also table.iinsert(). 
- -Metalint works with declaration files, which list which globals are declared, -and what can be done with them. The syntax is: - -DECL ::= (DECL_ELEM ";"?) * -DECL_ELEM ::= NAME | "module" NAME DECL "end" | "free" NAME | "private" DECL_ELEM -NAME ::= | - -Identifiers and strings are the same as in Lua, except that the only reserved -keywords are "free", "module", "end" and "private". A variable name can be -equivalently specified as a string or as an identifier. Lua comments are allowed -in declaration files, short and long. Check for *.dlua files in the distribution -for examples. - -Meaning of declaration elements: - -- Standalone names declare the existence of a variable. This variable is not a - module, i.e. people must not extract fields from it. For instance, the - function ipairs() will simply be declared as: "ipairs". With this declaration, - it's an error to write, for instance, "ipairs.some_field". - -- Names preceded with "free" can be used as you want, including arbitrary - sub-indexing. This is useful for global tables not used as modules, and for - modules you're too lazy to fully declare. For instance, the declaration "free - _G" allows you to bypass all checkings, as long as you access stuff through _G - rather than directly (i.e. "table.iinsert" will fail, but "_G.table.iinsert" - will be accepted). - -- modules contain field declarations. For instance, the contents of the standard - "os" module will be declared as "module os exit ; setlocale; date; [...] - execute end". - -Declaration files are loaded: - -- manually, by passing "-f filename", "-l libname" or "-e - decl_literal_expression" as options to the checking program. Options are - processed in order, i.e. if you load a library after a file name to check, - this library won't be accessible while checking the dource file. - -- automatically, when a call to "require()" is found in the code. - -- declaration library "base" is automatically loaded. 
- -Declaration library files are retrieved with the same algorithm as for Lua -libraries, except that the pattern string is taken from environment variable -LUA_DPATH rather than LUA_PATH or LUA_CPATH. For instance, if -LUA_DPATH="./?.dlua" and a "require 'walk.id'" is found, the checker will -attempt to load "./walk/id.dlua". It won't fail if it can't find it, but then, -attempts to use globals declared by walk.id are likely to fail. - -The metalua base libraries, which include Lua base libraries, can be found in -base.dlua. They're automatically loaded when you run metalint. - -Limitations: if you try to affect custom names to modules, e.g. "local -wi=require 'walk.id'", the checker won't be able to check your usage of -subfields of "wi". Similarly, if you redefine require() or module(), or create -custom versions of these, metalint will be lost. Finally, computed access to -modules are obviously not checked, i.e. "local x, y = 'iinsert', { }; -table[x](y, 1)" will be accepted. - -Future: Metalint is intended to support richer static type checkings, including -function argument types. The idea is not to formally prove type soundness, but -to accelerate the discovery of many silly bugs when using a (possibly third -party) library. However, to perform interesting checks, the type declaration -system must support a couple of non-trivial stuff like union types and higher -order functions. Moreover, runtime checking code could optionally be inserted to -check that a function API is respected when it's called (check the types -extension in Metalua). Stay tuned. - -Notice that metalint can easily be turned into a smarter variable localizer, -which would change references to module elements into local variables. -For instance, it would add "local _table_insert = table.insert" at the beginning -of the file, and change every instance of "table.insert" into a reference to the -local variable. This would be much more efficient than simply adding a "local -table=table". 
- - - -Finally, to accelerate the migration of existing codebases, a decl_dump() -function is provided with metalint, which attempts to generate a declaration for -a module currently loaded in RAM. The result is not always perfect, but remains -a serious time saver: - -~/src/metalua/src/sandbox$ metalua -Metalua, interactive REPLoop. -(c) 2006-2008 -M> require "metalint" -M> require "walk" -M> decl_dump ("walk", "decl/walk.dlua") -M> ^D -~/src/metalua/src/sandbox$ cat decl/walk.dlua -module walk - debug; - module tags - module stat - Forin; - Do; - Set; - Fornum; - Invoke; - While; - Break; - Call; - Label; - Goto; - Local; - If; - Repeat; - Localrec; - Return; - end; - module expr - True; - String; - Index; - Paren; - Id; - False; - Invoke; - Function; - Op; - Number; - Table; - Dots; - Nil; - Stat; - Call; - end; - end; - expr_list; - binder_list; - guess; - expr; - block; - module traverse - expr; - block; - stat; - expr_list; - end; - stat; -end; - -NEW SINCE 0.1: -============== - -Feature-wise, option -a replaces all references to declared fields with locals -and stores the compiled result in a .luac compiled file - -Architecture-wise, the system now remembers where (i.e. by which require() -statement, if applicable) a given field has been declared. This is necessary for -the autolocal feature to work correctly. 
\ No newline at end of file diff --git a/src/samples/metalint/dlua/base.dlua b/src/samples/metalint/dlua/base.dlua deleted file mode 100644 index 37e9afc..0000000 --- a/src/samples/metalint/dlua/base.dlua +++ /dev/null @@ -1,203 +0,0 @@ -rawtype; -gcinfo; -module os - exit; - setlocale; - date; - getenv; - difftime; - remove; - time; - clock; - tmpname; - rename; - execute; -end; -o; -getfenv; -const; -pairs; -max; -tonumber; -module io - lines; - write; - close; - flush; - open; - output; - type; - read; - stderr; - stdin; - input; - stdout; - popen; - tmpfile; -end; -load; -"module"; -free _G; -rawpairs; -module coroutine - resume; - yield; - status; - wrap; - create; - running; -end; -rawipairs; -loadstring; -module string - split; - match; - gmatch; - upper; - gsub; - format; - lower; - sub; - gfind; - find; - char; - dump; - undump; - reverse; - byte; - strmatch; - len; - rep; -end; -module metalua - version; - ext_compiler_prefix; - ext_runtime_prefix; -end; -module package - path; - metalua_loader; - cpath; - findfile; - free preload; - free loaders; - config; - free loaded; - loadlib; - mpath; - seeall; -end; -module table - shallow_copy; - iforeach; - tostring; - getn; - foreachi; - foreach; - sort; - ifold; - print; - icat; - isub; - transpose; - iany; - override; - imap; - izip; - range; - deep_copy; - cat; - iall; - maxn; - remove; - concat; - iflatten; - irev; - ifilter; - setn; - insert; -end -min; -printf; -require; -unpack; -global; -setmetatable; -next; -ipairs; -parser; -rawequal; -collectgarbage; -arg; -newproxy; -values; -xpcall; -rawset; -keys; -tostring; -print; -dostring; -decl_builder; -module math - log; - max; - acos; - huge; - ldexp; - pi; - cos; - tanh; - pow; - deg; - tan; - cosh; - sinh; - random; - randomseed; - frexp; - ceil; - floor; - rad; - abs; - sqrt; - modf; - asin; - min; - mod; - fmod; - log10; - atan2; - exp; - sin; - atan; -end -lua_loadstring; -pcall; -assert; -type; -getmetatable; -select; -ivalues; -rawget; -id; 
-setfenv; -module debug - getupvalue; - debug; - sethook; - getmetatable; - gethook; - setmetatable; - setlocal; - traceback; - setfenv; - getinfo; - setupvalue; - getlocal; - getregistry; - getfenv; -end -module strict end -dofile; -error; -loadfile; diff --git a/src/samples/metalint/dlua/clopts.dlua b/src/samples/metalint/dlua/clopts.dlua deleted file mode 100644 index 2e02c63..0000000 --- a/src/samples/metalint/dlua/clopts.dlua +++ /dev/null @@ -1 +0,0 @@ -free clopts; \ No newline at end of file diff --git a/src/samples/metalint/dlua/metalua/compiler.dlua b/src/samples/metalint/dlua/metalua/compiler.dlua deleted file mode 100644 index 5a1b0fd..0000000 --- a/src/samples/metalint/dlua/metalua/compiler.dlua +++ /dev/null @@ -1,402 +0,0 @@ -module lexer - module lexer - save; - newstream; - extract_long_string; - extract_word; - extract_short_string; - clone; - free __index; - is_keyword; - peek; - module sym - end; - extract; - next; - restore; - module extractors - end; - module alpha - end; - sync; - takeover; - module patterns - spaces; - number_exponant; - word; - long_string; - short_comment; - long_comment; - module number_mantissa - end; - final_short_comment; - end; - extract_symbol; - skip_whitespaces_and_comments; - check; - extract_number; - module token_metatable - end; - add; - end; - free _M; - _NAME; - _PACKAGE; -end; - -module gg - sequence; - _PACKAGE; - e; - is_parser; - with_lexer; - optkeyword; - onkeyword; - make_parser; - _NAME; - list; - expr; - free _M; - multisequence; - parse_error; -end; - -module bytecode - MAXPARAMS; - metalua_compile; - dump_file; - dump_string; - VARARG_ISVARARG; - indexupvalue; - MAX_INT; - free _M; - module luaU - LUA_TSTRING; - DumpBlock; - DumpByte; - DumpProtos; - DumpCode; - LUA_TNIL; - endianness; - LUA_TBOOLEAN; - DumpConstants; - DumpInt; - DumpDebug; - DumpLiteral; - DumpNumber; - from_int; - DumpSize; - LUA_TNUMBER; - DumpHeader; - dump; - DumpString; - ttype; - make_setS; - DumpUpvalues; - DumpLines; - 
DumpLocals; - make_setF; - LUA_TNONE; - DumpFunction; - from_double; - end; - LUA_MAXPARSERLEVEL; - VARARG_NEEDSARG; - module luaK - infix; - codenot; - NO_JUMP; - indexed; - checkstack; - dischargejpc; - fixline; - concat; - exp2reg; - code; - code_label; - exp2val; - sethvalue; - jumponcond; - prefix; - jump; - condjump; - ttisnumber; - exp2anyreg; - exp2RK; - setsvalue; - setnilvalue; - _nil; - exp2nextreg; - getjump; - codeAsBx; - addk; - need_value; - freeexp; - posfix; - nilK; - discharge2reg; - storevar; - setmultret; - setlist; - codeABx; - MAXSTACK; - codeABC; - freereg; - reserveregs; - codecomp; - dischargevars; - hasjumps; - setnvalue; - module arith_opc - sub; - mul; - not; - len; - pow; - div; - mod; - add; - end; - patchlist; - constfolding; - getlabel; - module test_opc - module ne - cond; - opc; - end; - module eq - cond; - opc; - end; - module ge - cond; - opc; - end; - module gt - cond; - opc; - end; - module le - cond; - opc; - end; - module lt - cond; - opc; - end; - end; - getjumpcontrol; - patchtohere; - LUA_MULTRET; - codearith; - boolK; - fixjump; - ret; - nvalue; - goiffalse; - isnumeral; - patchlistaux; - discharge2anyreg; - setoneret; - patchtestreg; - removevalues; - getcode; - _self; - goiftrue; - numberK; - setcallreturns; - invertjump; - setreturns; - stringK; - end; - module luaP - MAXARG_C; - SETARG_C; - MAXARG_A; - SETARG_sBx; - MAXARG_sBx; - MAXARG_Bx; - GETARG_A; - GETARG_C; - GETARG_sBx; - OpModeT; - POS_C; - GET_OPCODE; - SIZE_B; - module OpCode - OP_GETTABLE; - OP_GETGLOBAL; - OP_NOT; - OP_MOD; - OP_LOADK; - OP_TAILCALL; - OP_TEST; - OP_TESTSET; - OP_LE; - OP_GETUPVAL; - OP_CALL; - OP_SETTABLE; - OP_LT; - OP_POW; - OP_ADD; - OP_EQ; - OP_SETLIST; - OP_CONCAT; - OP_JMP; - OP_SETGLOBAL; - OP_CLOSE; - OP_SETUPVAL; - OP_NEWTABLE; - OP_DIV; - OP_LEN; - OP_CLOSURE; - OP_SELF; - OP_TFORLOOP; - OP_MUL; - OP_FORPREP; - OP_MOVE; - OP_LOADBOOL; - OP_FORLOOP; - OP_SUB; - OP_LOADNIL; - OP_RETURN; - OP_UNM; - OP_VARARG; - end; - MAXARG_B; - 
SETARG_A; - testOpMode; - SIZE_OP; - OpModeK; - module ROpCode - end; - SET_OPCODE; - NO_REG; - ISK; - module opnames - end; - MAXINDEXRK; - getOpMode; - SIZE_C; - RKASK; - OpModesetA; - SETARG_Bx; - OpModeCrk; - OpModeBrk; - OpModeBreg; - NUM_OPCODES; - LFIELDS_PER_FLUSH; - DecodeInst; - Instruction; - SETARG_B; - CREATE_ABC; - CREATE_ABx; - GETARG_B; - module OpMode - end; - POS_A; - POS_B; - POS_Bx; - module opmodes - end; - SIZE_A; - BITRK; - SIZE_Bx; - GETARG_Bx; - end; - _NAME; - VARARG_HASARG; - MAXUPVALUES; - _PACKAGE; - MAXVARS; - module format - number_size; - instr_size; - header; - little_endian; - int_size; - size_t_size; - integral; - end; -end; - -module mlc - - luastring_of_luafile; - lexstream_of_luafile; - ast_of_luafile; - proto_of_luafile; - luacstring_of_luafile; - function_of_luafile; - - lexstream_of_luastring; - ast_of_luastring; - proto_of_luastring; - luacstring_of_luastring; - function_of_luastring; - - ast_of_lexstream; - proto_of_lexstream; - luacstring_of_lexstream; - function_of_lexstream; - - proto_of_ast; - luacstring_of_ast; - function_of_ast; - - luacstring_of_proto; - function_of_proto; - - function_of_luacstring; - - luafile_to_luastring; - luafile_to_lexstream; - luafile_to_ast; - luafile_to_proto; - luafile_to_luacstring; - luafile_to_function; - - luastring_to_lexstream; - luastring_to_ast; - luastring_to_proto; - luastring_to_luacstring; - luastring_to_function; - - lexstream_to_ast; - lexstream_to_proto; - lexstream_to_luacstring; - lexstream_to_function; - - ast_to_proto; - ast_to_luacstring; - ast_to_function; - - proto_to_luacstring; - proto_to_function; - - luacstring_to_function; - - luacstring_of_function; - function_to_luacstring; - - convert; - module order - function; - luafile; - luacstring; - proto; - lexstream; - luastring; - ast; - end; - __index; -end; -extension; -module mlp -end - -module metalua - module compiler - end -end \ No newline at end of file diff --git a/src/samples/metalint/dlua/walk.dlua 
b/src/samples/metalint/dlua/walk.dlua deleted file mode 100644 index 86939b8..0000000 --- a/src/samples/metalint/dlua/walk.dlua +++ /dev/null @@ -1,3 +0,0 @@ -module walk - expr; block; stat; expr_list; guess -end \ No newline at end of file diff --git a/src/samples/metalint/dlua/walk/id.dlua b/src/samples/metalint/dlua/walk/id.dlua deleted file mode 100644 index 7545b34..0000000 --- a/src/samples/metalint/dlua/walk/id.dlua +++ /dev/null @@ -1,3 +0,0 @@ -module walk_id - expr; block; stat; expr_list; guess -end \ No newline at end of file diff --git a/src/samples/metalint/metalint.dlua b/src/samples/metalint/metalint.dlua deleted file mode 100644 index 85f2823..0000000 --- a/src/samples/metalint/metalint.dlua +++ /dev/null @@ -1,10 +0,0 @@ -free decl_lexer; -- I want to access its alpha symbols table -decl_builder; -free decl_parser; -free decl_elem_parser; -parse_decl_lib; -parse_decl_expr; -parse_decl_file; -check_src_file; -decl_dump; -free clopts_cfg; \ No newline at end of file diff --git a/src/samples/metalint/metalint.mlua b/src/samples/metalint/metalint.mlua deleted file mode 100644 index e70f1f9..0000000 --- a/src/samples/metalint/metalint.mlua +++ /dev/null @@ -1,294 +0,0 @@ --{ extension 'match' } --{ extension 'log' } - -require 'strict' -require 'metalua.compiler' - -local VERBOSE = false -local PARSING_OWN_DECL = false -local MY_GLOBALS = { } -local LOAD_SOURCE = nil -local DECLARATIONS = { } -local AUTOLOCALS = { } - - -local function debug_print(...) - if VERBOSE then return printf(...) 
end -end - --- Lexer -- -decl_lexer = lexer.lexer:clone() -decl_lexer:add{ 'module', 'free', 'end', 'private' } - --- Parser -- - --- Merge two decl together -local function merge (x, y) - --$log('merge', x, y) - for k, v in pairs (y) do - match x[k], v with - | `Free, _ | `Atom{x}, `Atom{x} -> -- pass - | _, `Free | nil, _ -> x[k] = v - | `Module{ _, mod_x }, `Module{ _, mod_y } -> merge (mod_x, mod_y) - | _, _ -> - $log("Merge failure", x[k], v) - error ("Can't merge type elements") - end - end -end - --- break mutual dependency between decl_elem_parser and decl_parser -local _decl_elem_parser = |...| decl_elem_parser(...) - --- Parse a name, presented as an `Id or a `String -local function name(lx) - local a = lx:next() - if a.tag=='String' or a.tag=='Id' then return a[1] - else error("Name expected, got "..table.tostring(a,'nohash')) end -end - -function decl_builder(x) - --$log('decl_builder', x) - local r = { } - for y in ivalues(x) do - if y.tag ~= 'Private' then merge (r, {[y[1]]=y}) end - end - return r -end - -decl_parser = gg.list{ - gg.sequence{ _decl_elem_parser, gg.optkeyword ';', builder = |x|x[1] }, - terminators = 'end', builder = decl_builder } - -decl_elem_parser = gg.multisequence{ - { 'module', name, decl_parser, 'end', builder = |x| `Module{x[1], x[2]} }, - { 'free', name, builder = |x| `Free{x[1]} }, - { 'private', _decl_elem_parser, builder = |x| PARSING_OWN_DECL and x[1] or `Private }, - default = gg.sequence{ name, builder = |x| `Atom{x[1]} } } - -decl_elem_parser.transformers:add (function(x) x.loader = LOAD_SOURCE end) - -function parse_decl_lib (libname) - debug_print ("Loading decl lib "..libname) - local fd, msg = package.findfile (libname, os.getenv 'LUA_DPATH' or "?.dlua") - if not fd then error ("Can't find declaration file for "..libname) end - local src = fd:read '*a' - fd:close() - return parse_decl_expr (src) -end - -function parse_decl_expr (src) - local lx = decl_lexer:newstream (src) - local r = decl_parser (lx) - 
--$log('result of parse_decl', r) - merge(DECLARATIONS, r) - return r -end - -function parse_decl_file (filename) - debug_print ("Loading decl file "..filename) - local src = mlc.luastring_of_luafile (filename) - return parse_decl_expr (src) -end - --- AST checker -- -require 'walk.id' - -local function index_autolocal (e, loader) - --$log('index_autolocals', loader) - local is_mine = false - local function get_name(x) - match x with - | `Index{ y, `String{key} } -> return get_name(y)..'~'..key - | `Invoke{ y, `String{key}, _ } -> - error('autolocals for invocation not implemented '..table.tostring(x)) - | `Id{ name } -> is_mine = MY_GLOBALS[name]; return '~'..name - | _ -> error(table.tostring(x)..'\n') - end - end - local name = get_name(e) - if is_mine then return end -- Don't index my own global vars - local x = AUTOLOCALS[name] - if not x then x={ }; AUTOLOCALS[name] = x end - table.insert(x, { e, loader }) -end - -local walk_cfg = { id = { }, stat = { }, expr = { } } - -function walk_cfg.id.free(x, ...) - --$log('in free id walker', x) - local parents = {...} - local dic = DECLARATIONS - local name = x[1] - for p in ivalues (parents) do - local decl = dic[name] - if not decl then error("Not declared: "..name) end - match p with - | `Index{ _x, `String{n} } | `Invoke{ _x, `String{n}, ...} if _x==x -> - match decl with - | `Free{...} -> break - | `Atom{...} -> error (name.." is not a module") - | `Module{ _, dic2 } -> dic, name, x = dic2, n, p - end - | _ -> -- x == last checked variable - debug_print("Checked "..table.tostring(x, 'nohash').. 
- ", found in "..table.tostring(decl.loader, 'nohash')) - index_autolocal (x, decl.loader) - break - end - end -end - -local function try_load_decl (kind, mod_name) - local success, r = pcall(_G['parse_decl_'..kind], mod_name) - if not success then - debug_print("Warning, error when trying to load %s:\n%s", mod_name, r) - else - return r - end -end - -local function call_walker(x) - --$log('in call walker', x) - match x with - | `Call{ `Id 'require', `String{ mod_name } } -> - if not DECLARATIONS[mod_name] then - LOAD_SOURCE = `Require{x} - try_load_decl('lib', mod_name) - end - | `Module{ `Id 'module', _ } -> -- no package.seeall - DECLARATIONS = { } -- reset declarations - | _ -> -- pass - end -end - -walk_cfg.expr.down = call_walker -walk_cfg.stat.down = call_walker - -local CHECKED_AST, CHECKED_NAME - -function check_src_file(name) - debug_print ("Checking file "..name) - CHECKED_NAME = name - CHECKED_AST = mlc.ast_of_luafile (name) - --$log(ast,'nohash') - PARSING_OWN_DECL = true - local x = try_load_decl('lib', name:gsub("%.m?lua$", "")) - for name in keys(x) do MY_GLOBALS[name] = true end - PARSING_OWN_DECL = false - walk_id.block (walk_cfg, CHECKED_AST) - printf("File %s checked successfully", name) -end - -local function replace_autolocals () - local top_defs, req_defs = { }, { } - for k, v in pairs (AUTOLOCALS) do - local original = table.shallow_copy(v[1][1]) - local loader = v[1][2] - match loader with - | `Require{ r } -> - local defs = req_defs[r] - if not defs then defs={ }; req_defs[r]=defs end - defs[k] = original - | `Base | `Directive -> - top_defs[k] = original - end - for exlo in ivalues (v) do - local expr, this_loader = unpack(exlo) - assert (this_loader[1]==loader[1] and this_loader.tag==loader.tag, - "Autolocal lost by homonymous declarations") - expr <- `Id{k} - end - end - - -- Insert beginning-of-file local declarations - local top_locals = `Local{ { }, { } } - for k, v in pairs(top_defs) do - table.insert(top_locals[1], `Id{k}) - 
table.insert(top_locals[2], v) - end - table.insert (CHECKED_AST, 1, top_locals) - - -- Insert declarations after require() statements - for req_stat, renamings in pairs (req_defs) do - local req_locals = `Local{ { }, { } } - local r2 = table.shallow_copy(req_stat) - req_stat <- { r2, req_locals }; req_stat.tag = nil - for k, v in pairs (renamings) do - table.insert(req_locals[1], `Id{k}) - table.insert(req_locals[2], v) - end - end - - if clopts_cfg.debug then table.print(CHECKED_AST, 'nohash', 60) end - local chunk = mlc.luacstring_of_ast (CHECKED_AST) - local f = io.open (CHECKED_NAME:gsub('%.m?lua', '')..'.luac', 'w') - f:write(chunk) - f:close() -end - --- RAM dumper -- - -function decl_dump(name, f) - match type(f) with - | 'nil' -> f=io.stdout - | 'string' -> f=io.open(f, 'w') or error ("Can't open file "..f) - | 'userdata' -> -- pass - | t -> error ("Invalid target file type "..t) - end - local indentation, acc, seen = 0, { }, { } - local function esc(n) - if n:gmatch "[%a_][%w_]*" and not decl_lexer.alpha[n] then return n else return '"'..n..'"' end - end - local function add_line(...) 
table.insert(acc, table.concat{' ':rep(indentation), ...}) end - local function rec(n, v) - if seen[v] then add_line ('free ', esc(n), ";") - elseif type(v)=='table' then - seen[v] = true - add_line ('module ', esc(n)) - indentation += 1 - for n2, v2 in pairs(v) do - if type(n2)=='string' then rec (n2, v2) end - end - indentation -= 1 - add_line 'end;' - else - add_line (esc(n), ';') - end - end - rec(name, _G[name]) - for line in ivalues (acc) do - f:write(line, '\n') - end - if f~=io.stdout then f:close() end -end - - --- options handling -- -require 'clopts' - -local cl_parser = clopts { - check_src_file, - - { short = 'd', long = 'debug', type = 'boolean', - usage = 'print debug traces', action = function(x) VERBOSE=x end }, - - { short = 'l', long = 'decl_lib', type = 'string*', usage = 'load decl lib', - action = function (x) LOAD_SOURCE=`Directive; return parse_decl_lib(x) end }, - - { short = 'f', long = 'decl_file', type = 'string*', usage = 'load decl file', - action = function (x) LOAD_SOURCE=`Directive; return parse_decl_file(x) end }, - - { short = 'x', long = 'decl_expr', type = 'string*', - usage = 'decl expression to eval', - action = function (x) LOAD_SOURCE=`Directive; return parse_decl_expr(x) end }, - - { short = 'a', long = 'autolocals', type = 'boolean', - usage = 'compiles the program with autolocals' } } - -LOAD_SOURCE = `Base -try_load_decl('lib', 'base') -clopts_cfg = cl_parser (...) 
-if clopts_cfg.autolocals then - replace_autolocals() -end \ No newline at end of file diff --git a/src/samples/synth.mlua b/src/samples/synth.mlua deleted file mode 100644 index 01090ee..0000000 --- a/src/samples/synth.mlua +++ /dev/null @@ -1,560 +0,0 @@ -require 'strict' - --{ extension 'match' } - -synth = { } -synth.__index = synth - --------------------------------------------------------------------------------- --- Instanciate a new AST->source synthetizer --------------------------------------------------------------------------------- -function synth.new () - local self = { - _acc = { }, -- Accumulates pieces of source as strings - current_indent = 0, -- Current level of line indentation - indent_step = " " -- Indentation symbol, normally spaces or '\t' - } - return setmetatable (self, synth) -end - --------------------------------------------------------------------------------- --- Run a synthetizer on the `ast' arg and return the source as a string. --- Can also be used as a static method `synth.run (ast)'; in this case, --- a temporary synthetizer is instanciated on the fly. --------------------------------------------------------------------------------- -function synth:run (ast) - if not ast then - self, ast = synth.new(), self - end - self._acc = { } - self:node (ast) - return table.concat (self._acc) -end - --------------------------------------------------------------------------------- --- Accumulate a piece of source file in the synthetizer. --------------------------------------------------------------------------------- -function synth:acc (x) - if x then table.insert (self._acc, x) end -end - --------------------------------------------------------------------------------- --- Accumulate an indented newline. --- Jumps an extra line if indentation is 0, so that --- toplevel definitions are separated by an extra empty line. 
--------------------------------------------------------------------------------- -function synth:nl () - if self.current_indent == 0 then self:acc "\n" end - self:acc ("\n" .. self.indent_step:rep (self.current_indent)) -end - --------------------------------------------------------------------------------- --- Increase indentation and accumulate a new line. --------------------------------------------------------------------------------- -function synth:nlindent () - self.current_indent = self.current_indent + 1 - self:nl () -end - --------------------------------------------------------------------------------- --- Decrease indentation and accumulate a new line. --------------------------------------------------------------------------------- -function synth:nldedent () - self.current_indent = self.current_indent - 1 - self:acc ("\n" .. self.indent_step:rep (self.current_indent)) -end - --------------------------------------------------------------------------------- --- Keywords, which are illegal as identifiers. --------------------------------------------------------------------------------- -local keywords = table.transpose { - "and", "break", "do", "else", "elseif", - "end", "false", "for", "function", "if", - "in", "local", "nil", "not", "or", - "repeat", "return", "then", "true", "until", - "while" } - --------------------------------------------------------------------------------- --- Return true iff string `id' is a legal identifier name. --------------------------------------------------------------------------------- -local function is_ident (id) - return id:strmatch "^[%a_][%w_]*$" and not keywords[id] -end - --------------------------------------------------------------------------------- --- Return true iff ast represents a legal function name for --- syntax sugar ``function foo.bar.gnat() ... end'': --- a series of nested string indexes, with an identifier as --- the innermost node. 
--------------------------------------------------------------------------------- -local function is_idx_stack (ast) - match ast with - | `Id{ _ } -> return true - | `Index{ left, `String{ _ } } -> return is_idx_stack (left) - | _ -> return false - end -end - --------------------------------------------------------------------------------- --- Operator precedences, in increasing order. --- This is not directly used, it's used to generate op_prec below. --------------------------------------------------------------------------------- -local op_preprec = { - { "or", "and" }, - { "lt", "le", "eq", "ne" }, - { "concat" }, - { "add", "sub" }, - { "mul", "div", "mod" }, - { "unary", "not", "len" }, - { "pow" }, - { "index" } } - --------------------------------------------------------------------------------- --- operator --> precedence table, generated from op_preprec. --------------------------------------------------------------------------------- -local op_prec = { } - -for prec, ops in ipairs (op_preprec) do - for op in ivalues (ops) do - op_prec[op] = prec - end -end - --------------------------------------------------------------------------------- --- operator --> source representation. --------------------------------------------------------------------------------- -local op_symbol = { - add = " + ", sub = " - ", mul = " * ", - div = " / ", mod = " % ", pow = " ^ ", - concat = " .. ", eq = " == ", ne = " ~= ", - lt = " < ", le = " <= ", ["and"] = " and ", - ["or"] = " or ", ["not"] = "not ", len = "# " } - --------------------------------------------------------------------------------- --- Accumulate the source representation of AST `node' in --- the synthetizer. Most of the work is done by delegating to --- the method having the name of the AST tag. --- If something can't be converted to normal sources, it's --- instead dumped as a `-{ ... }' splice in the source accumulator. 
--------------------------------------------------------------------------------- -function synth:node (node) - assert (self~=synth and self._acc) - if not node.tag then -- tagless block. - self:list (node, self.nl) - else - local f = synth[node.tag] - if type (f) == "function" then -- Delegate to tag method. - f (self, node, unpack (node)) - elseif type (f) == "string" then -- tag string. - self:acc (f) - else -- No appropriate method, fall back to splice dumping. - -- This cannot happen in a plain Lua AST. - self:acc " -{ " - self:acc (table.tostring (node, "nohash"), 80) - self:acc " }" - end - end -end - --------------------------------------------------------------------------------- --- Convert every node in the AST list `list' passed as 1st arg. --- `sep' is an optional separator to be accumulated between each list element, --- it can be a string or a synth method. --- `start' is an optional number (default == 1), indicating which is the --- first element of list to be converted, so that we can skip the begining --- of a list. --------------------------------------------------------------------------------- -function synth:list (list, sep, start) - for i = start or 1, # list do - self:node (list[i]) - if list[i + 1] then - if not sep then - elseif type (sep) == "function" then sep (self) - elseif type (sep) == "string" then self:acc (sep) - else error "Invalid list separator" end - end - end -end - --------------------------------------------------------------------------------- --- --- Tag methods. --- ------------ --- --- Specific AST node dumping methods, associated to their node kinds --- by their name, which is the corresponding AST tag. --- synth:node() is in charge of delegating a node's treatment to the --- appropriate tag method. --- --- Such tag methods are called with the AST node as 1st arg. --- As a convenience, the n node's children are passed as args #2 ... n+1. 
--- --- There are several things that could be refactored into common subroutines --- here: statement blocks dumping, function dumping... --- However, given their small size and linear execution --- (they basically perform series of :acc(), :node(), :list(), --- :nl(), :nlindent() and :nldedent() calls), it seems more readable --- to avoid multiplication of such tiny functions. --- --- To make sense out of these, you need to know metalua's AST syntax, as --- found in the reference manual or in metalua/doc/ast.txt. --- --------------------------------------------------------------------------------- - -function synth:Do (node) - self:acc "do" - self:nlindent () - self:list (node, self.nl) - self:nldedent () - self:acc "end" -end - -function synth:Set (node) - match node with - | `Set{ { `Index{ lhs, `String{ method } } }, - { `Function{ { `Id "self", ... } == params, body } } } - if is_idx_stack (lhs) and is_ident (method) -> - -- ``function foo:bar(...) ... end'' -- - self:acc "function " - self:node (lhs) - self:acc ":" - self:acc (method) - self:acc " (" - self:list (params, ", ", 2) - self:acc ")" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - self:acc "end" - - | `Set{ { lhs }, { `Function{ params, body } } } if is_idx_stack (lhs) -> - -- ``function foo(...) ... end'' -- - self:acc "function " - self:node (lhs) - self:acc " (" - self:list (params, ", ") - self:acc ")" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - self:acc "end" - - | `Set{ { `Id{ lhs1name } == lhs1, ... } == lhs, rhs } - if not is_ident (lhs1name) -> - -- ``foo, ... = ...'' when foo is *not* a valid identifier. - -- In that case, the spliced 1st variable must get parentheses, - -- to be distinguished from a statement splice. - -- This cannot happen in a plain Lua AST. 
- self:acc "(" - self:node (lhs1) - self:acc ")" - if lhs[2] then -- more than one lhs variable - self:acc ", " - self:list (lhs, ", ", 2) - end - self:acc " = " - self:list (rhs, ", ") - - | `Set{ lhs, rhs } -> - -- ``... = ...'', no syntax sugar -- - self:list (lhs, ", ") - self:acc " = " - self:list (rhs, ", ") - end -end - -function synth:While (node, cond, body) - self:acc "while " - self:node (cond) - self:acc " do" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - self:acc "end" -end - -function synth:Repeat (node, body, cond) - self:acc "repeat" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - self:acc "until " - self:node (cond) -end - -function synth:If (node) - for i = 1, #node-1, 2 do - -- for each ``if/then'' and ``elseif/then'' pair -- - local cond, body = node[i], node[i+1] - self:acc (i==1 and "if " or "elseif ") - self:node (cond) - self:acc " then" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - end - -- odd number of children --> last one is an `else' clause -- - if #node%2 == 1 then - self:acc "else" - self:nlindent () - self:list (node[#node], self.nl) - self:nldedent () - end - self:acc "end" -end - -function synth:Fornum (node, var, first, last) - local body = node[#node] - self:acc "for " - self:node (var) - self:acc " = " - self:node (first) - self:acc ", " - self:node (last) - if #node==5 then -- 5 children --> child #4 is a step increment. 
- self:acc ", " - self:node (node[4]) - end - self:acc " do" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - self:acc "end" -end - -function synth:Forin (node, vars, generators, body) - self:acc "for " - self:list (vars, ", ") - self:acc " in " - self:list (generators, ", ") - self:acc " do" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - self:acc "end" -end - -function synth:Local (node, lhs, rhs) - self:acc "local " - self:list (lhs, ", ") - if rhs[1] then - self:acc " = " - self:list (rhs, ", ") - end -end - -function synth:Localrec (node, lhs, rhs) - match node with - | `Localrec{ { `Id{name} }, { `Function{ params, body } } } - if is_ident (name) -> - -- ``local function name() ... end'' -- - self:acc "local function " - self:acc (name) - self:acc " (" - self:list (params, ", ") - self:acc ")" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - self:acc "end" - - | _ -> - -- Other localrec are unprintable ==> splice them -- - -- This cannot happen in a plain Lua AST. -- - self:acc "-{ " - self:acc (table.tostring (node, 'nohash', 80)) - self:acc " }" - end -end - -function synth:Call (node, f) - -- single string or table literal arg ==> no need for parentheses. -- - local parens - match node with - | `Call{ _, `String{_} } - | `Call{ _, `Table{...}} -> parens = false - | _ -> parens = true - end - self:node (f) - self:acc (parens and " (" or " ") - self:list (node, ", ", 2) -- skip `f'. - self:acc (parens and ")") -end - -function synth:Invoke (node, f, method) - -- single string or table literal arg ==> no need for parentheses. -- - local parens - match node with - | `Invoke{ _, _, `String{_} } - | `Invoke{ _, _, `Table{...}} -> parens = false - | _ -> parens = true - end - self:node (f) - self:acc ":" - self:acc (method[1]) - self:acc (parens and " (" or " ") - self:list (node, ", ", 3) -- Skip args #1 and #2, object and method name. 
- self:acc (parens and ")") -end - -function synth:Return (node) - self:acc "return " - self:list (node, ", ") -end - -synth.Break = "break" -synth.Nil = "nil" -synth.False = "false" -synth.True = "true" -synth.Dots = "..." - -function synth:Number (node, n) - self:acc (tostring (n)) -end - -function synth:String (node, str) - -- format "%q" prints '\n' in an umpractical way IMO, - -- so this is fixed with the :gsub( ) call. - self:acc (string.format ("%q", str):gsub ("\\\n", "\\n")) -end - -function synth:Function (node, params, body) - self:acc "function " - self:acc " (" - self:list (params, ", ") - self:acc ")" - self:nlindent () - self:list (body, self.nl) - self:nldedent () - self:acc "end" -end - -function synth:Table (node) - if not node[1] then self:acc "{ }" else - self:acc "{" - self:nlindent () - for i, elem in ipairs (node) do - match elem with - | `Pair{ `String{ key }, value } if is_ident (key) -> - -- ``key = value''. -- - self:acc (key) - self:acc " = " - self:node (value) - - | `Pair{ key, value } -> - -- ``[key] = value''. -- - self:acc "[" - self:node (key) - self:acc "] = " - self:node (value) - - | _ -> - -- ``value''. -- - self:node (elem) - end - if node [i+1] then - self:acc "," - self:nl () - end - end - self:nldedent () - self:acc "}" - end -end - -function synth:Op (node, op, a, b) - -- Transform ``not (a == b)'' into ``a ~= b''. -- - match node with - | `Op{ "not", `Op{ "eq", _a, _b } } - | `Op{ "not", `Paren{ `Op{ "eq", _a, _b } } } -> - op, a, b = "ne", _a, _b - | _ -> - end - - if b then -- binary operator. - local left_paren, right_paren - match a with - | `Op{ op_a, ...} if op_prec[op] >= op_prec[op_a] -> left_paren = true - | _ -> left_paren = false - end - - match b with -- FIXME: might not work with right assoc operators ^ and .. 
- | `Op{ op_b, ...} if op_prec[op] >= op_prec[op_b] -> right_paren = true - | _ -> right_paren = false - end - - self:acc (left_paren and "(") - self:node (a) - self:acc (left_paren and ")") - - self:acc (op_symbol [op]) - - self:acc (right_paren and "(") - self:node (b) - self:acc (right_paren and ")") - - else -- unary operator. - local paren - match a with - | `Op{ op_a, ... } if op_prec[op] >= op_prec[op_a] -> paren = true - | _ -> paren = false - end - self:acc (op_symbol[op]) - self:acc (paren and "(") - self:node (a) - self:acc (paren and ")") - end -end - -function synth:Paren (node, content) - self:acc "(" - self:node (content) - self:acc ")" -end - -function synth:Index (node, table, key) - local paren_table - -- Check precedence, see if parens are needed around the table -- - match table with - | `Op{ op, ... } if op_prec[op] < op_prec.index -> paren_table = true - | _ -> paren_table = false - end - - self:acc (paren_table and "(") - self:node (table) - self:acc (paren_table and ")") - - match key with - | `String{ field } if is_ident (field) -> - -- ``table.key''. -- - self:acc "." - self:acc (field) - | _ -> - -- ``table [key]''. -- - self:acc "[" - self:node (key) - self:acc "]" - end -end - -function synth:Id (node, name) - if is_ident (name) then - self:acc (name) - else -- Unprintable identifier, fall back to splice representation. - -- This cannot happen in a plain Lua AST. 
- self:acc "-{`Id " - self:String (node, name) - self:acc "}" - end -end - - --------------------------------------------------------------------------------- --- Read a file, get its AST, use synth to regenerate sources --- from that AST --------------------------------------------------------------------------------- -require 'metalua.compiler' -local filename = (arg[2] or arg[1]) or arg[0] -local ast = mlc.luafile_to_ast (filename) - -print(synth.run(ast)) diff --git a/src/samples/trycatch_test.mlua b/src/samples/trycatch_test.mlua deleted file mode 100644 index e89dfd3..0000000 --- a/src/samples/trycatch_test.mlua +++ /dev/null @@ -1,107 +0,0 @@ --{ extension 'trycatch' } - - ----------------------------------------------------------------------- -print "1) no error" -try - print(" Hi") -end - - ----------------------------------------------------------------------- -print "2) caught error" -try - error "some_error" -catch x then - printf(" Successfully caught %q", x) -end - - --- [[ ----------------------------------------------------------------------- -print "3) no error, with a finally" -try - print " Hi" -finally - print " Finally OK" -end - - ----------------------------------------------------------------------- -print "4) error, with a finally" -try - print " Hi" - error "bang" -catch "bang"/{_} then - print " Bang caught" -finally - print " Finally OK" -end - - ----------------------------------------------------------------------- -print "5) nested catchers" -try - try - error "some_error" - catch "some_other_error" then - assert (false, "mismatch, this must not happen") - end - catch "some_error"/{x} then - printf(" Successfully caught %q across a try that didn't catch", x) -catch x then - assert (false, "We shouldn't reach this catch-all") -end - - ----------------------------------------------------------------------- -print "6) nested catchers, with a 'finally in the inner one" -try - try - error "some_error" - catch "some_other_error" then - 
assert (false, "mismatch, this must not happen") - finally - print " Leaving the inner try-catch" - end -catch "some_error"/{x} then - printf(" Successfully caught %q across a try that didn't catch", x) -catch x then - assert (false, "We shouldn't reach this catch-all") -end - - ----------------------------------------------------------------------- -print "7) 'finally' intercepts a return from a function" -function f() - try - print " into f:" - return "F_RESULT" - assert (false, "I'll never go there") - catch _ then - assert (false, "No exception should be thrown") - finally - print " I do the finally before leaving f()" - end -end -local fr = f() -printf(" f returned %q", fr) - - ----------------------------------------------------------------------- -print "8) don't be fooled by nested functions" -function f() - try - local function g() return "from g" end - printf(" g() returns %q", g()) - return "from f" - catch _ then - assert (false, "No exception should be thrown") - end -end -local fr = f() -printf(" f returned %q", fr) - ----------------------------------------------------------------------- -print "*) done." - diff --git a/src/samples/types_test.mlua b/src/samples/types_test.mlua deleted file mode 100644 index 62f4a91..0000000 --- a/src/samples/types_test.mlua +++ /dev/null @@ -1,19 +0,0 @@ --{ extension "types" } --{ extension "clist" } - --- Uncomment this to turn typechecking code generation off: --- -{stat: types.enabled=false} - -function sum (x :: table(number)) :: number - local acc :: number = 0 - for i=1, #x do - acc = acc + x[i] -- .. 'x' -- converts to string - end - --acc='bug' -- put a string in a number variable - return acc -end - -x = { i for i=1,100 } ---x[23] = 'toto' -- string in a number list, sum() will complain -y = sum (x) -printf ("sum 1 .. 
%i = %i", #x, y) \ No newline at end of file diff --git a/src/samples/walk_id_test.mlua b/src/samples/walk_id_test.mlua deleted file mode 100644 index f8f01a0..0000000 --- a/src/samples/walk_id_test.mlua +++ /dev/null @@ -1,26 +0,0 @@ --{ extension 'match' } - -require 'metalua.walk.id' - -ast = +{ block: - y = type(1) - function foo(x) - local type = 'number' - assert(x==type or not x) - end - foo(x) } - -disp = |msg,ast| printf("\n%s:\n%s", msg, table.tostring(ast, 80, 'nohash')) -disp('initial term', ast) - -do -- Make globals explicit: - local ast = table.deep_copy(ast) - local cfg = { id = { } } - function cfg.id.free(i) - i <- `Index{ `Id '_G', `String{i[1]} } - return 'break' - end - walk_id.block(cfg, ast) - disp('Globals made explicit', ast) -end - diff --git a/src/samples/weaver.mlua b/src/samples/weaver.mlua deleted file mode 100644 index f428a41..0000000 --- a/src/samples/weaver.mlua +++ /dev/null @@ -1,120 +0,0 @@ -require 'metalua.mlc' -require 'metalua.walk' - -function weave_ast (src, ast, name) - - ------------------------------------------------------------------- - -- translation: associate an AST node to its recomposed source - -- ast_children: associate an AST node to the list of its children - -- ast_parent: associate an AST node to the list of its parent - -- weavable: whether an AST node supports weaving of its children - -- node: common walker config for exprs, stats & blocks - ------------------------------------------------------------------- - local translation, ast_children, ast_parent, weaveable, node = - { }, { }, { }, { }, { } - - ------------------------------------------------------------------- - -- Build up the parent/children relationships. This is not the same - -- as inclusion between tables: the relation we're building only - -- relates blocks, expressions and statements; in the AST, some - -- tables don't represent any of these node kinds. 
- -- For instance in `Local{ { `Id "x" }, { } }, `Id"x" is a child of - -- the `Local{ } node, although it's not directly included in it. - ------------------------------------------------------------------- - function node.down(ast, parent) - ---------------------------------------------------- - -- `Do{ } blocks are processed twice: - -- * once as a statement - -- * once as a block, child of itself - -- This prevents them from becoming their own child. - ---------------------------------------------------- - if ast==parent then return end - - if not ast.lineinfo then - weaveable [ast] = false, false - if parent then weaveable [parent] = false end - else - weaveable [ast] = true - - -- normalize lineinfo - -- TODO: FIXME - if ast.lineinfo.first[3] > ast.lineinfo.last[3] then - ast.lineinfo.first, ast.lineinfo.last = ast.lineinfo.last, ast.lineinfo.first - end - end - ast_children [ast] = { } - ast_parent [ast] = parent - if parent then table.insert (ast_children [parent], ast) end - end - - ------------------------------------------------------------------- - -- Visit up, from leaves to upper-level nodes, and weave leaves - -- back into the text of their parent node, recursively. Since the - -- visitor is imperative, we can't easily make it return a value - -- (the resulting recomposed source, here). Therefore we - -- imperatively store results in the association table - -- `translation'. - ------------------------------------------------------------------- - function node.up(ast) - local _acc = { } - local function acc(x) table.insert (_acc, x) end - - if not next(ast) then -- shadow node, remove from ast_children - local x = ast_children[ast_parent[ast]] - for i,a in ipairs (x) do if a==ast then table.remove (x, i); break end end - return "" -- no need to continue, we know that the node is empty! 
- end - - -- ast Can't be weaved normally, try something else -- - local function synthetize (ast) - acc "-{expr: " - acc (table.tostring (ast, 'nohash', 80, 8)) - acc " }" - end - - -- regular weaving of chidren in the parent's sources -- - local function weave (ast) - -- sort children in appearence order - local comp = |a,b| a.lineinfo.first[3] < b.lineinfo.first[3] - table.sort (ast_children [ast], comp) - - local li = ast.lineinfo - if not li then return synthetize (ast) end - local a, d = li.first[3], li.last[3] - for child in ivalues (ast_children [ast]) do - local li = child.lineinfo - local b, c = li.first[3], li.last[3] - acc (src:sub (a, b - 1)) - acc (translation [child]) - a = c + 1 - end - acc (src:sub (a, d)) - end - - -- compute the translation from the children's ones -- - if not translation [ast] then - if weaveable [ast] then weave (ast) else synthetize (ast) end - translation [ast] = table.concat (_acc) - end - end - - local cfg = { expr=node; stat=node; block=node } - walk.block (cfg, ast) - - return translation [ast] -end - --- Get the source. If none is given, use itself as an example. -- -local filename = arg[2] or arg[1] or arg[0] -local f = assert (io.open (filename, 'r')) -local src = f:read '*a' -f:close() - -local ast = mlc.luastring_to_ast (src, name) -if not next(ast) then - io.write (src) -- Empty ast, probably empty file, or comments only -else - local before = src:sub (1, ast.lineinfo.first[3]-1) - local after = src:sub (ast.lineinfo.last[3]+1, -1) - io.write (before .. weave_ast (src, ast) .. 
after) -end diff --git a/src/samples/withdo_test.mlua b/src/samples/withdo_test.mlua deleted file mode 100644 index 4e647b1..0000000 --- a/src/samples/withdo_test.mlua +++ /dev/null @@ -1,13 +0,0 @@ --{ extension 'withdo' } - -local original_close = io.close - -function x() - with f1, f2 = io.open 'withdo_test.mlua', io.open 'trycatch_test.mlua' do - local t1 = f1:read '*a' - local t2 = f2:read '*a' - return #t1, #t2 - end -end - -print(x()) \ No newline at end of file diff --git a/src/samples/xglobals_test.mlua b/src/samples/xglobals_test.mlua deleted file mode 100644 index 670b651..0000000 --- a/src/samples/xglobals_test.mlua +++ /dev/null @@ -1,40 +0,0 @@ --{ extension 'xglobal' } - ----------------------------------------------------------------------- -print "1) declare unassigned globals" -global a, b - ----------------------------------------------------------------------- -print "2) declare-and-assign global" -global c = 3 - ----------------------------------------------------------------------- -print "3) assign to pre-declared globals" -a, b = 1, 2 - ----------------------------------------------------------------------- -print "4) fail when setting an undeclared global" -local st1, msg1 = pcall(function() - a = 4 - d = 5 -- failure, assignment to undeclared global -end) -assert(not st1) -printf (" -> This error was expected: %s", msg1) - ----------------------------------------------------------------------- -print "5) fail when reading an undeclared global" -local st2, msg2 = pcall(function() - b = c -- OK - local _ = d -- failure, try to read undeclared global -end) -assert(not st2) -printf (" -> This error was expected: %s", msg2) - ----------------------------------------------------------------------- -print "6) check the globals' values" -assert(a==4) -assert(b==3) -assert(c==3) - ----------------------------------------------------------------------- -print "*) done." 
diff --git a/src/samples/xloop_test.mlua b/src/samples/xloop_test.mlua deleted file mode 100644 index 395a074..0000000 --- a/src/samples/xloop_test.mlua +++ /dev/null @@ -1,4 +0,0 @@ --{ extension 'xloop' } -for i=1,9 for j=10,90,10 if i~=3 while i<8 do - io.write(i+j, ' ') -end \ No newline at end of file diff --git a/src/samples/xmatch_test.mlua b/src/samples/xmatch_test.mlua deleted file mode 100755 index 9289775..0000000 --- a/src/samples/xmatch_test.mlua +++ /dev/null @@ -1,54 +0,0 @@ --{ extension 'xmatch' } - -WIDTH=60 -function p(msg) io.write(msg..' ':rep(WIDTH-#msg)) end - ----------------------------------------------------------------------- -p "match as an expression" -print(match 1 with 1 -> 'ok' | 2 -> 'KO') - ----------------------------------------------------------------------- -p "global match function" -match function g -| x if x<10 -> return 'o' -| _ -> return 'k' -end -print(g(1)..g(11)) - ----------------------------------------------------------------------- -p "global match function, multi-args" -match function cmp -| x, y if x return 'increasing' -| _, _ -> return 'decreasing' - end - -if cmp(1,2)=='increasing' and cmp(2,1)=='decreasing' then - print "ok" else print "KO" -end - ----------------------------------------------------------------------- -p "local match function" -do - local match function x - | 1 -> print 'ok' - end - x(1) -end -assert(not x) - ----------------------------------------------------------------------- -p "global bind assignment" -bind {a, b} = {'o', 'k'} -print(a..b) - ----------------------------------------------------------------------- -p "local bind assignment" -c, d = 'k', 'o' -do - local bind {c, {d}} = {'o', {'k'}} - print(c..d) -end - ----------------------------------------------------------------------- -p "local bind assignment scope" -print(d..c) diff --git a/src/tests/locals-and-stats.mlua b/src/tests/locals-and-stats.mlua deleted file mode 100644 index 1ac2283..0000000 --- 
a/src/tests/locals-and-stats.mlua +++ /dev/null @@ -1,15 +0,0 @@ -local foo - -x = -{ `Stat{ { `Local{ { `Id "B" }, - { `Stat{ { `Local{ { `Id "A" }, - { `Number 4 } }, - `Set{ { `Id "y" }, - { `Id "A" } } }, - `Id "A" } } }, - `Set{ { `Id "x" }, - { `Id "B" } } }, - `Id "B" } } - -assert(x==4) -print "Test passed." - diff --git a/src/tests/reweave.mlua b/src/tests/reweave.mlua deleted file mode 100644 index 9cf2258..0000000 --- a/src/tests/reweave.mlua +++ /dev/null @@ -1,25 +0,0 @@ --{ extension 'xloop' } - -ls = io.popen ( (os.getenv("OS") or "") :match "^Windows" and "dir /b reweave" or "ls reweave") -this_script = arg[1] - -local errors = {} - -for filename in ls :lines() if filename :strmatch "%.m?lua$" do - printf ("--- weaver check %s ---", filename) - local ret = os.execute ("metalua ../samples/weaver.mlua reweave/"..filename.." | diff -q reweave/"..filename.." -") - if ret ~= 0 then - print("================================================================================") - print("Reweaved source does not match original:") - print("================================================================================") - os.execute ("metalua ../samples/weaver.mlua reweave/"..filename.." | diff reweave/"..filename.." -") - errors[#errors + 1] = "Reweaving of "..filename.." failed, returned "..ret - end -end - -ls :close() - -if #errors > 0 then - print("================================================================================") - error("REWEAVING ERRORS DETECTED:\n * " .. 
table.concat(errors, "\n * ")) -end diff --git a/src/tests/reweave/comment.lua b/src/tests/reweave/comment.lua deleted file mode 100644 index 587d620..0000000 --- a/src/tests/reweave/comment.lua +++ /dev/null @@ -1,3 +0,0 @@ ---[[ -comment ---]] diff --git a/src/tests/reweave/comment2.lua b/src/tests/reweave/comment2.lua deleted file mode 100644 index 6222fb6..0000000 --- a/src/tests/reweave/comment2.lua +++ /dev/null @@ -1,2 +0,0 @@ ---[[comment]] -local code = 5 diff --git a/src/tests/reweave/comment_dup.lua b/src/tests/reweave/comment_dup.lua deleted file mode 100644 index 17afdfd..0000000 --- a/src/tests/reweave/comment_dup.lua +++ /dev/null @@ -1,8 +0,0 @@ -if true then - -- comment -end - -if true then - -- comment - print("something else after") -end diff --git a/src/tests/reweave/comments.lua b/src/tests/reweave/comments.lua deleted file mode 100644 index 05e8762..0000000 --- a/src/tests/reweave/comments.lua +++ /dev/null @@ -1,8 +0,0 @@ -# it eats ---[[ all ]] ---[===[ my ]===] -comments() -- foo ---[[ bar -baz ]] qqq() --- even -one() -- liners diff --git a/src/tests/reweave/dup.lua b/src/tests/reweave/dup.lua deleted file mode 100644 index e6428ab..0000000 --- a/src/tests/reweave/dup.lua +++ /dev/null @@ -1,2 +0,0 @@ -f(a > b) -f(c >= d) diff --git a/src/tests/reweave/empty.lua b/src/tests/reweave/empty.lua deleted file mode 100644 index e69de29..0000000 diff --git a/src/tests/reweave/extra_whitespace.lua b/src/tests/reweave/extra_whitespace.lua deleted file mode 100644 index 7358189..0000000 --- a/src/tests/reweave/extra_whitespace.lua +++ /dev/null @@ -1,16 +0,0 @@ -t = {} - -t = { } - -t {} - -t { } - -assert(count(function () end) == 1) - -for k,v,w in a do end - -repeat until 1; repeat until true; -while false do end; while nil do end; - -foo(1) { }; diff --git a/src/tests/reweave/function-index-decl.lua b/src/tests/reweave/function-index-decl.lua deleted file mode 100644 index a7c77ac..0000000 --- a/src/tests/reweave/function-index-decl.lua +++ 
/dev/null @@ -1,2 +0,0 @@ -function a.b.c.f1 (x) return x+1 end -function a.b.c:f2 (x,y) self[x] = y end diff --git a/src/tests/reweave/if.lua b/src/tests/reweave/if.lua deleted file mode 100644 index 32ddd19..0000000 --- a/src/tests/reweave/if.lua +++ /dev/null @@ -1 +0,0 @@ -f(5 > 7) diff --git a/src/tests/reweave/index_index.lua b/src/tests/reweave/index_index.lua deleted file mode 100644 index 6aa0028..0000000 --- a/src/tests/reweave/index_index.lua +++ /dev/null @@ -1 +0,0 @@ -function a.b.c (x) end diff --git a/src/tests/reweave/schema.lua b/src/tests/reweave/schema.lua deleted file mode 100644 index c5275ba..0000000 --- a/src/tests/reweave/schema.lua +++ /dev/null @@ -1,1286 +0,0 @@ -local print, verb, dbg, errr, print_table, printt = make_module_loggers("schema", "SCM") - -local CT, GMF, - game_const - = import 'game/const.lua' - { - 'chipTypes', - 'gameModeFlags' - } - -local MTF, - cast_type - = import (game_const.abilities) - { - 'manualTargetFlags', - 'castType' - } - -local AP, abiprob_mapping = import (game_const.abilities.property) - { - 'mappingInv', -- Note order (inverted goes first) - 'mapping' - } - -local PO, CM, CST, SO, - abie_const - = import 'abie/const.lua' - { - 'propObjects', - 'customMessages', - 'clientStat', - 'storeObjects' - } - -local non_empty_list, - no_check, - not_implemented, - get_children, - get_children_concat_newline, - get_children_concat_str, - get_children_concat_table, - get_value, - get_value_quoted, - get_value_tonumber, - check_mapping_tonumber, - get_value_mapped_tonumber_quoted, - node_children_placeholders_filler, - check_tonumber - = import 'jsle/schema/util.lua' - { - 'non_empty_list', - 'no_check', - 'not_implemented', - 'get_children', - 'get_children_concat_newline', - 'get_children_concat_str', - 'get_children_concat_table', - 'get_value', - 'get_value_quoted', - 'get_value_tonumber', - 'check_mapping_tonumber', - 'get_value_mapped_tonumber_quoted', - 'node_children_placeholders_filler', - 'check_tonumber' 
- } - -local declare_common = import 'jsle/schema/common.lua' { 'declare_common' } - --- Optional TODOs: - --- TODO: Must be able to fetch back data from lang file to this schema. --- TODO: Write effect validation with human readable answers. Make it available via jobman's job. --- TODO: Write auto-conversion function for old abilities (v.1.01->current) --- TODO: Embed limitations on number of simultanious identical active OT effects --- TODO: Write checkers for numeric fields --- TODO: Adapt game/ctrl.lua to abie - -local define_schema = function(jsle) - assert_is_table(jsle) - --- WARNING: Return nil on error from handlers, do not return false -- it is a legitimate value. --- WARNING: Reordering of schema elements would result in INCOMPATIBLE format change! - - local propwrite_values = - { - { ["health"] = [[жизнь]] }; - { ["health_max"] = [[здоровье]] }; - { ["mana1"] = [[красную ману]] }; - { ["mana2"] = [[зелёную ману]] }; - { ["mana3"] = [[синюю ману]] }; - -- Note mana4 is reserved for health - { ["mana5"] = [[ману 5]] }; - { ["mana6"] = [[ману 6]] }; - { ["mana7"] = [[ману 7]] }; - { ["mana8"] = [[ману 8]] }; - { ["armor"] = [[броню]] }; - { ["fury"] = [[ярость]] }; - { ["block"] = [[блок]] }; - { ["fortune"] = [[удачу]] }; - { ["stun"] = [[оглушение]] }; - { ["armour_piercing"] = [[бронебойность]] }; - { ["agility"] = [[ловкость]] }; - { ["counterattack"] = [[контрудар]] }; - { ["damage"] = [[базовый урон]] }; - { ["damage_min"] = [[минимальный урон]] }; - { ["damage_max"] = [[максимальный урон]] }; - { ["damage_mult"] = [[множитель урона]] }; - { ["vampiric"] = [[вампиризм]] }; - { ["stun_count"] = [[оглушённость]] }; - } - - local propread_values = tiappend( - tclone(propwrite_values), - { - { ["race_id"] = [[расу]] }, - { ["level"] = [[уровень]] }, - { ["grade"] = [[степень]] }, -- TODO: clan_rank?! 
- { ["rank"] = [[ранг]] }, - { ["glory"] = [[доблесть]] }, - { ["scalps"] = [[скальпы]] }, - { ["kills"] = [[убийства]] }, - } - ) - - -- TODO: Be more specific. Should be at least "abie-1.03". - jsle:version("1.03") -- WARNING: Do an ordering cleanup when this changes - - jsle:record "ROOT" - { - children = - { - [1] = "TARGET_LIST"; - [2] = "IMMEDIATE_EFFECT_LIST"; - [3] = "OVERTIME_EFFECT"; - [4] = { "BOOLEAN", default = 0 }; -- Warning! Do not use BOOLOP_VARIANT, nothing of it would work at this point. - [5] = { "CUSTOM_OVERTIME_EFFECTS", default = empty_table }; - }; - html = [[

Цели

%C(1)%

Мгновенные эффекты

Игнорировать активацию в статистике:%C(4)%

Действия:%C(2)%

Овертайм-эффекты

%C(3)%
%C(5)%]]; - checker = no_check; - handler = function(self, node) - return self:effect_from_string( - node.value[1], -- Target list - node.value[4], -- Ignore usage stats flag - self:fill_placeholders( - node.value, -[[ -function(self) - self:set_custom_ot_effects($(5)) - - do - $(2) - end - - do - $(3) - end -end -]] - ) - ) - end; - } - - jsle:list "TARGET_LIST" - { - type = "TARGET_VALUE"; - html = [[%LIST(", ")%]]; - checker = non_empty_list; - handler = function(self, node) - local result = 0 - for i, v in ipairs(node.value) do - result = result + v - end - return result - end; - } - - jsle:enum "TARGET_VALUE" - { - values = - { - { [MTF.AUTO_ONLY] = [[неинтерактивно]] }; - { [MTF.SELF_HUMAN] = [[на себя]] }; - { [MTF.SELF_TEAM_HUMAN] = [[на человека в своей команде]] }; - { [MTF.OPP_HUMAN] = [[на противника]] }; - { [MTF.OPP_TEAM_HUMAN] = [[на человека в команде противника]] }; - { [MTF.FIELD_CHIP] = [[на фишку]] }; - }; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value_tonumber; - numeric_keys = true; - } - - jsle:list "IMMEDIATE_EFFECT_LIST" - { - type = "ACTION_VARIANT"; - html = [[%LE("Нет")%%LNE("
  1. ")%%LIST("
  2. ")%%LNE("
")%]]; - checker = no_check; - handler = get_children_concat_newline; - } - - jsle:record "OVERTIME_EFFECT" - { - children = - { - [1] = "OT_EFFECT_TARGET"; - [2] = "NUMOP_VARIANT"; - [3] = "NUMOP_VARIANT"; - [4] = "BOOLOP_VARIANT"; - [5] = "OVERTIME_EFFECT_LIST"; - [6] = "OVERTIME_EFFECT_LIST"; - [7] = "OVERTIME_EFFECT_LIST"; - [8] = "OT_MODIFIER_LIST"; - [9] = "NUMOP_VARIANT"; -- TODO: Must be higher in the list. Straighten numbers on next version change (do not forget to fix texts) - [10] = "NUMOP_VARIANT"; -- TODO: Must be higher in the list. Straighten numbers on next version change (do not forget to fix texts) - [11] = { "GAME_MODES", default = GMF.ALL }; -- TODO: Must be higher in the list. Straighten numbers on next version change (do not forget to fix texts) - [12] = { "BOOLEAN", default = 0 }; - }; - html = [[
Цель: %C(1)%
Время жизни: %C(2)% (≥255 — бессрочно)
Период: %C(3)%
Изначальный кулдаун: %C(10)%
Сброс в конце боя: %C(4)%
Остается при снятии всех эффектов вручную: %C(12)%
Максимальное число одновременно активных эффектов: %C(9)% (0 — не ограничено)
Игровые режимы: %C(11)%

При изменении набора характеристик

%C(5)%

В конце хода цели

%C(7)%

Временные модификаторы (кроме жизни)

%C(8)%]]; - checker = no_check; - handler = function(self, node) - if - node.value[5] ~= "" or - node.value[6] ~= "" or - node.value[7] ~= "" or - node.value[8] ~= "{}" - then - -- Spawning OT effect only if have any actions in it. - return node_children_placeholders_filler - [[ - self:spawn_overtime_effect( - $(1), - $(2), - $(3), - $(10), - $(4), - $(9), - function(self) - $(5) - end, - function(self) - $(6) - end, - function(self) - $(7) - end, - $(8), - $(11), - $(12) - ) - ]] (self, node) - else - return [[-- No OT effects]] - end - end; - } - - jsle:list "OT_MODIFIER_LIST" - { - type = "OT_MODIFIER_VARIANT"; - html = [[%LE("Нет")%%LNE("
  1. ")%%LIST("
  2. ")%%LNE("
")%]]; - checker = no_check; - handler = get_children_concat_table; - } - - jsle:variant "OT_MODIFIER_VARIANT" - { - values = - { - { ["MOD_SET"] = [[Установить]] }; - { ["MOD_INC"] = [[Увеличить]] }; - { ["MOD_DEC"] = [[Уменьшить]] }; - { ["MOD_MULT"] = [[Умножить]] }; - }; - label = [["M"]]; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value; - } - - jsle:record "MOD_SET" - { - children = - { - [1] = "PROPWRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Установить %C(1)% цели в %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[{ name = $(1), fn = function(self, value) return ($(2)) end; }]]; - } - - jsle:record "MOD_INC" - { - children = - { - [1] = "PROPWRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Увеличить %C(1)% цели на %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[{ name = $(1), fn = function(self, value) return value + ($(2)) end; }]]; - } - - jsle:record "MOD_DEC" - { - children = - { - [1] = "PROPWRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Уменьшить %C(1)% цели на %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[{ name = $(1), fn = function(self, value) return value - ($(2)) end; }]]; - } - - jsle:record "MOD_MULT" - { - children = - { - [1] = "PROPWRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Умножить %C(1)% цели на %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[{ name = $(1), fn = function(self, value) return value * ($(2)) end; }]]; - } - - jsle:list "OVERTIME_EFFECT_LIST" - { - type = "ACTION_VARIANT"; - html = [[%LE("Нет")%%LNE("
  1. ")%%LIST("
  2. ")%%LNE("
")%]]; - checker = no_check; - handler = get_children_concat_newline; - } - - jsle:list "ACTION_LIST" - { - type = "ACTION_VARIANT"; - html = [[
  1. %LIST("
  2. ")%
]]; - checker = non_empty_list; - handler = get_children_concat_newline; - } - - jsle:variant "ACTION_VARIANT" - { - values = - { - { ["ACT_SET"] = [[Установить]] }; - { ["ACT_INC"] = [[Увеличить]] }; - { ["ACT_DEC"] = [[Уменьшить]] }; - { ["ACT_MULT"] = [[Умножить]] }; - { ["ACT_DIRECTSET"] = [[Установить напрямую]] }; - { ["ACT_DIRECTINC"] = [[Увеличить напрямую]] }; - { ["ACT_DIRECTDEC"] = [[Уменьшить напрямую]] }; - { ["ACT_DIRECTMULT"] = [[Умножить напрямую]] }; - { ["ACT_FLDEXPLODE"] = [[Взорвать фишки]] }; - { ["ACT_FLDLEVELDELTA"] = [[Поднять уровень фишек]] }; - { ["ACT_FLDCOLLECT_COORDS"] = [[Собрать фишки по координатам]] }; - { ["ACT_FLDREPLACE_COORDS"] = [[Заменить фишки по координатам]] }; - { ["ACT_ONEMOREACTION"] = [[Дать ещё одно действие]] }; - { ["ACT_KEEPTIMEOUT"] = [[Не сбрасывать таймер]] }; - { ["ACT_SETVAR"] = [[Запомнить]] }; - { ["ACT_SETOBJVAR_LOCAL"] = [[Запомнить в объекте локально]] }; - { ["ACT_SETOBJVAR_GLOBAL"] = [[Запомнить в объекте глобально]] }; - { ["ACT_SETOBJVAR_OT"] = [[Запомнить в текущем овертайме]] }; - { ["ACT_DOIF"] = [[Если]] }; - { ["ACT_DOIFELSE"] = [[Если ... 
иначе]] }; - { ["ACT_PLAYABIANIM"] = [[Играть эффект абилки]] }; - { ["ACT_SENDCUSTOMMSG"] = [[Отправить данные клиентам]] }; - { ["ACT_INCSTAT"] = [[Увеличить статистику клиента]] }; - { ["ACT_ACTIVATEOT"] = [[Активировать ОТ-эффект]] }; - { ["ACT_REMOVE_OVERTIMES"] = [[Снять ОТ-эффекты]] }; - -- Keep these below -- - { ["ACT_FLDREPLACE"] = [[Заменить фишки (устарело)]] }; - { ["ACT_CRASH_GAME"] = [[УРОНИТЬ игру (только для тестов)]] }; - -- { ["PLAINLUA"] = [[Lua]] }; - }; - label = [["A"]]; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value; - } - - declare_common(jsle, "ACT_DOIF", "ACT_DOIFELSE") - - jsle:record "ACT_SET" - { - children = - { - [1] = "PROPPATH_WRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Установить %C(1)% в %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propset($(1), $(2))]]; - } - - jsle:record "ACT_INC" - { - children = - { - [1] = "PROPPATH_WRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Увеличить %C(1)% на %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propinc($(1), $(2))]]; - } - - jsle:record "ACT_DEC" - { - children = - { - [1] = "PROPPATH_WRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Уменьшить %C(1)% на %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propdec($(1), $(2))]]; - } - - jsle:record "ACT_MULT" - { - children = - { - [1] = "PROPPATH_WRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Умножить %C(1)% на %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propmult($(1), $(2))]]; - } - - jsle:record "ACT_DIRECTSET" - { - children = - { - [1] = "PROPPATH_WRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Установить напрямую %C(1)% в %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propset_direct($(1), $(2))]]; - } - - jsle:record "ACT_DIRECTINC" - { - children = - { - [1] = "PROPPATH_WRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = 
[[Увеличить напрямую %C(1)% на %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propinc_direct($(1), $(2))]]; - } - - jsle:record "ACT_DIRECTDEC" - { - children = - { - [1] = "PROPPATH_WRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Уменьшить напрямую %C(1)% на %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propdec_direct($(1), $(2))]]; - } - - jsle:record "ACT_DIRECTMULT" - { - children = - { - [1] = "PROPPATH_WRITE"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Умножить напрямую %C(1)% на %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propmult_direct($(1), $(2))]]; - } - - jsle:record "ACT_FLDEXPLODE" - { - children = - { - [1] = "NUMOP_VARIANT"; - [2] = "CHIPCOORD"; - }; - html = [[Взорвать бомбу радиусом %C(1)% в координатах %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:fld_explode($(1), $(2))]]; - } - - jsle:record "ACT_FLDREPLACE" - { - children = - { - [1] = "CHIPTYPE"; - [2] = "NUMOP_VARIANT"; - [3] = "CHIPTYPE"; - [4] = "NUMOP_VARIANT"; - }; - html = [[Заменить %C(1)% уровня %C(2)% на %C(3)% уровня %C(4)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:fld_replace($(1), $(2), $(3), $(4))]]; - doc = [[Deprecated, use other replace actions]]; - } - - jsle:record "ACT_FLDLEVELDELTA" - { - children = - { - [1] = "NUMOP_VARIANT"; - [2] = "CHIPTYPE"; - [3] = "NUMOP_VARIANT"; - [4] = "NUMOP_VARIANT"; - }; - html = [[Поднять уровень %C(2)% на %C(1)% в диапазоне от %C(3)% до %C(4)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:fld_level_delta($(1), $(2), $(3), $(4))]]; - } - - jsle:record "ACT_FLDCOLLECT_COORDS" - { - children = - { - [1] = "COORDLISTOP_VARIANT"; - }; - html = [[Собрать %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:fld_collect_coords($(1))]]; - } - - jsle:record "ACT_FLDREPLACE_COORDS" - { - children = - 
{ - [1] = "COORDLISTOP_VARIANT"; - [2] = "CHIPTYPE_LIST"; - [3] = "NUMOP_VARIANT"; - }; - html = [[Заменить %C(1)% на %C(2)% уровня %C(3)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:fld_replace_coords($(1),$(2),$(3))]]; - } - - jsle:literal "ACT_ONEMOREACTION" - { - html = [[Дать ещё одно действие (только мгновенный эффект)]]; - checker = no_check; - handler = invariant [[self:one_more_action()]]; - } - - jsle:literal "ACT_KEEPTIMEOUT" - { - html = [[Не сбрасывать таймер (только мгновенный эффект)]]; - checker = no_check; - handler = invariant [[self:keep_timeout()]]; - } - - jsle:record "ACT_SETVAR" - { - children = - { - [1] = "NUMOP_VARIANT"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Запомнить в №%C(1)% значение %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:setvar($(1), $(2))]]; - } - - jsle:enum "OT_EFFECT_TARGET" - { - values = - { - { [PO.SELF] = [[на себя]] }; - { [PO.OPP] = [[на противника]] }; - { [PO.TARGET] = [[на цель]] }; - }; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value_quoted; - } - - jsle:variant "BOOLOP_VARIANT" - { - values = - { - { ["BOOLEAN"] = [[Логическое значение]] }; - { ["BOOLOP_LT"] = [[<]] }; - { ["BOOLOP_LTE"] = [[≤]] }; - { ["BOOLOP_GT"] = [[>]] }; - { ["BOOLOP_GTE"] = [[≥]] }; - { ["BOOLOP_EQ"] = [[==]] }; - { ["BOOLOP_NEQ"] = [[!=]] }; - { ["BOOLOP_AND_MANY"] = [[И (Список)]] }; - { ["BOOLOP_OR_MANY"] = [[ИЛИ (Список)]] }; - { ["BOOLOP_NOT"] = [[НЕ]] }; - { ["BOOLOP_HAVEMEDAL"] = [[МЕДАЛЬ]] }; - { ["BOOLOP_ISACTIVE"] = [[Изменения инициированы целью овертайм-эффекта]] }; - { ["BOOLOP_IS_GAME_IN_MODE"] = [[Текущий игровой режим]] }; - -- Deprecated, keep below -- - { ["BOOLOP_AND"] = [[И]] }; - { ["BOOLOP_OR"] = [[ИЛИ]] }; - --{ ["PLAINLUA"] = [[Lua]] }; - }; - label = [["B"]]; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value; - } - - jsle:record "BOOLOP_HAVEMEDAL" - { - children = - { - [1] = "PROPOBJECT"; - [2] = 
"NUMOP_VARIANT"; - }; - html = [[есть медаль №%C(2)% %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:have_medal($(1), $(2))]]; - } - - jsle:literal "BOOLOP_ISACTIVE" - { - html = [[изменения инициированы целью овертайм-эффекта]]; - checker = no_check; -- Only for on_changeset event. - handler = invariant [[self:is_overtime_target_active()]]; - } - - declare_common( - jsle, - "BOOLOP_LT", - "BOOLOP_LTE", - "BOOLOP_GT", - "BOOLOP_GTE", - "BOOLOP_EQ", - "BOOLOP_NEQ", - "BOOLOP_AND", - "BOOLOP_OR", - "BOOLOP_NOT" - ) - - jsle:variant "NUMOP_VARIANT" - { - values = - { - { ["NUMBER"] = [[Число]] }; - { ["NUMOP_ADD_MANY"] = [[+ (Список)]] }; - { ["NUMOP_DEC_MANY"] = [[- (Список)]] }; - { ["NUMOP_MUL_MANY"] = [[* (Список)]] }; - { ["NUMOP_DIV_MANY"] = [[/ (Список)]] }; - { ["NUMOP_POV"] = [[POW]] }; -- TODO: POW, not POV! Fix by search and replace - { ["NUMOP_MOD"] = [[MOD]] }; - { ["NUMOP_MIN"] = [[MIN]] }; - { ["NUMOP_MAX"] = [[MAX]] }; - { ["NUMOP_UNM"] = [[Знак]] }; - { ["NUMOP_GET"] = [[Характеристика]] }; - { ["NUMOP_GET_RAW"] = [[Базовое значение характеристики]] }; - { ["NUMOP_GET_ABIPROP"] = [[Характеристика абилки]] }; - { ["NUMOP_PERCENT_ROLL"] = [[Cлучайный процент]] }; - { ["NUMOP_TEAMSIZE"] = [[Размер команды]] }; - { ["NUMOP_GETVAR"] = [[Вспомнить]] }; - { ["NUMOP_GETOBJVAR_LOCAL"] = [[Вспомнить из объекта локально]] }; - { ["NUMOP_GETOBJVAR_GLOBAL"] = [[Вспомнить из объекта глобально]] }; - { ["NUMOP_GETOBJVAR_OT"] = [[Вспомнить из текущего овертайма]] }; - { ["NUMOP_OTLIFETIMELEFT"] = [[Оставшееся время жизни]] }; - { ["NUMOP_OTLIFETIMETOTAL"] = [[Общее время жизни]] }; - { ["NUMOP_FLDGETQUANTITYOFCHIPS"] = [[Число фишек по цвету и уровню]] }; - { ["NUMOP_TARGETX"] = [[Координата X выбранной фишки]] }; - { ["NUMOP_TARGETY"] = [[Координата Y выбранной фишки]] }; - { ["NUMOP_OTEFFECTCOUNT"] = [[Число активных овертайм-эффектов]] }; - { ["NUMOP_IFF"] = [[Если]] }; - { ["NUMOP_GETUID"] = [[Идентификатор игрока]] }; - -- 
Keep these below -- - { ["NUMOP_FLDCOUNTCHIPS"] = [[Число фишек на поле (устарело)]] }; - { ["NUMOP_ADD"] = [[+]] }; - { ["NUMOP_DEC"] = [[-]] }; - { ["NUMOP_MUL"] = [[*]] }; - { ["NUMOP_DIV"] = [[/]] }; - { ["NUMOP_CRASH_GAME"] = [[УРОНИТЬ игру (только для тестов)]] }; - --{ ["PLAINLUA"] = [[Lua]] }; - }; - label = [["I"]]; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value; - } - - declare_common( - jsle, - "NUMOP_ADD", - "NUMOP_DEC", - "NUMOP_MUL", - "NUMOP_DIV", - "NUMOP_POV", - "NUMOP_MOD", - "NUMOP_MIN", - "NUMOP_MAX", - "NUMOP_UNM" - ) - - jsle:record "NUMOP_GET" - { - children = - { - [1] = "PROPPATH_READ"; - }; - html = [[%C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propget($(1), false)]]; - } - - declare_common(jsle, "NUMOP_PERCENT_ROLL") - - jsle:record "NUMOP_FLDCOUNTCHIPS" - { - children = - { - [1] = "CHIPTYPE"; - [2] = "BOOLOP_VARIANT"; - }; - html = [[число %C(1)% на поле (учитывая уровни: %C(2)%)]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:fld_count_chips($(1), $(2))]]; - doc = [[Deprecated, use other chip count operations]]; - } - - jsle:record "NUMOP_TEAMSIZE" - { - children = - { - [1] = "PROPOBJECT"; - }; - html = [[размер команды %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:team_size($(1))]]; - } - - jsle:record "NUMOP_GETVAR" - { - children = - { - [1] = "NUMOP_VARIANT"; - }; - html = [[вспомнить из №%C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:getvar($(1))]]; - } - - jsle:literal "NUMOP_OTLIFETIMELEFT" - { - html = [[оставшееся время жизни]]; - checker = no_check; - handler = invariant [[self:ot_lifetime_left()]]; - } - - jsle:literal "NUMOP_OTLIFETIMETOTAL" - { - html = [[общее время жизни]]; - checker = no_check; - handler = invariant [[self:ot_lifetime_total()]]; - } - - jsle:literal "NUMOP_TARGETX" - { - html = [[X выбранной фишки]]; - checker = no_check; - handler = 
invariant [[self:target_x()]]; - } - - jsle:literal "NUMOP_TARGETY" - { - html = [[Y выбранной фишки]]; - checker = no_check; - handler = invariant [[self:target_y()]]; - } - - jsle:record "PROPPATH_WRITE" - { - children = - { - [1] = "PROPOBJECT"; - [2] = "PROPWRITE"; - }; - html = [[%C(2)% %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:make_proppath($(1), $(2))]]; - } - - jsle:record "PROPPATH_READ" - { - children = - { - [1] = "PROPOBJECT"; - [2] = "PROPREAD"; - }; - html = [[%C(2)% %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:make_proppath($(1), $(2))]]; - } - - jsle:enum "PROPOBJECT" - { - values = - { - { [PO.SELF] = [[у себя]] }; - { [PO.OPP] = [[у противника]] }; - { [PO.TARGET] = [[у цели]] }; - { [PO.OWN_CHANGESET] = [[в своём наборе изменений]] }; - { [PO.OPP_CHANGESET] = [[в наборе изменений противника]] }; - }; - html = [[%VALUE()%]]; - checker = no_check; -- Check value is valid for current action list subtype - handler = get_value_quoted; - } - - jsle:enum "PROPWRITE" - { - values = propwrite_values; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value_quoted; - } - - jsle:enum "PROPREAD" - { - values = propread_values; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value_quoted; - } - - jsle:enum "CHIPTYPE" - { - values = - { - { [CT.EMERALD] = [[зелёных фишек]] }; - { [CT.RUBY] = [[красных фишек]] }; - { [CT.AQUA] = [[синих фишек]] }; - { [CT.DMG] = [[черепов]] }; - { [CT.CHIP5] = [[фишек-5]] }; - { [CT.CHIP6] = [[фишек-6]] }; - { [CT.CHIP7] = [[фишек-7]] }; - { [CT.CHIP8] = [[фишек-8]] }; - { [CT.EMPTY] = [[пустых фишек]] }; - }; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value_tonumber; - numeric_keys = true; - } - - jsle:edit "NUMBER" - { - size = 4; - numeric = true; - checker = check_tonumber; - handler = get_value_tonumber; - } - - declare_common( - jsle, - "BOOLEAN", - "PLAINLUA" - ) - - jsle:list "COORDLISTOP_STD" - 
{ - type = "CHIPCOORD"; - html = [[фишки с координатами %LIST(", ")%]]; - checker = non_empty_list; - handler = get_children_concat_table; - } - - jsle:record "CHIPCOORD" - { - children = - { - [1] = "NUMOP_VARIANT"; - [2] = "NUMOP_VARIANT"; - }; - html = [[(x: %C(1)%, y: %C(2)%)]]; - checker = no_check; - handler = node_children_placeholders_filler [[{x=$(1), y=$(2)}]]; - } - - -- TODO: UNUSED. Remove or use. - jsle:record "BOOLOP_SELECTEDTARGET" - { - children = - { - [1] = "TARGET_VALUE"; - }; - html = [[выбрана цель %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:is_target_selected($(1))]]; - doc = [[Currently not used]]; - } - - jsle:record "NUMOP_OTEFFECTCOUNT" - { - children = - { - [1] = "PROPOBJECT"; - [2] = "NUMOP_VARIANT"; - [3] = "NUMOP_VARIANT"; - }; - html = [[число овертайм-эффектов абилки ID %C(2)% (0 — этот эффект) № эффекта %C(3)% (0 — по умолчанию), активных %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:active_ot_effect_count($(1), $(2), $(3))]]; - } - - declare_common(jsle, "NUMOP_IFF") - - jsle:record "NUMOP_GET_RAW" - { - children = - { - [1] = "PROPPATH_READ"; - }; - html = [[базовое значение %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:propget($(1), true)]]; - } - - -- TODO: Get rid of non-list versions! 
- - declare_common( - jsle, - "NUMOP_ADD_MANY", - "NUMOP_DEC_MANY", - "NUMOP_MUL_MANY", - "NUMOP_DIV_MANY" - ) - - declare_common( - jsle, - "BOOLOP_AND_MANY", - "BOOLOP_OR_MANY" - ) - - jsle:list "CHIPTYPE_LIST" - { - type = "CHIPTYPE"; - html = [[%LIST(", ")%]]; - checker = non_empty_list; - handler = get_children_concat_table; - } - - jsle:record "NUMOP_GET_ABIPROP" - { - children = - { - [1] = "ABIPROP_NAME"; - }; - html = [[%C(1)% абилки]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:abipropget($(1))]]; - } - - jsle:enum "ABIPROP_NAME" - { - values = - { - { [AP.prob] = [[вероятность активации]] }; - }; - html = [[%VALUE()%]]; - checker = check_mapping_tonumber; - handler = get_value_mapped_tonumber_quoted(abiprob_mapping); - } - - jsle:record "ACT_SENDCUSTOMMSG" - { - children = - { - [1] = "NUMOP_LIST"; - }; - html = [[Отправить участникам боя данные: %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:send_custom_msg($(1))]]; - } - - declare_common(jsle, "NUMOP_LIST") - - jsle:record "ACT_PLAYABIANIM" - { - children = - { - [1] = "NUMOP_VARIANT"; - }; - html = [[Играть эффект абилки ID: %C(1)%]]; - checker = no_check; - -- Hack. Should format be hardcoded here or below? 
- handler = node_children_placeholders_filler( - [[self:send_custom_msg({]]..assert_is_number(CM.PLAYABIANIM) - ..[[, $(1), self:get_uid("]]..PO.SELF..[[")})]] - ); - } - - jsle:variant "COORDLISTOP_VARIANT" - { - values = - { - { ["COORDLISTOP_STD"] = [[Обычный список коордтнат]] }; - { ["COORDLISTOP_GETLEVEL"] = [[Фишки цвета цв1 с уровнями от ур1 до ур2]] }; - }; - label = [["C"]]; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value; - } - - jsle:record "COORDLISTOP_GETLEVEL" - { - children = - { - [1] = "CHIPTYPE"; - [2] = "NUMOP_VARIANT"; - [3] = "NUMOP_VARIANT"; - }; - html = [[%C(1)% с уровнями от %C(2)% до %C(3)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:fld_get_coordlist_from_levels_and_type($(1), $(2), $(3))]]; - } - - jsle:record "NUMOP_FLDGETQUANTITYOFCHIPS" - { - children = - { - [1] = "CHIPTYPE"; - [2] = "NUMOP_VARIANT"; - [3] = "NUMOP_VARIANT"; - [4] = "BOOLOP_VARIANT"; - }; - html = [[число %C(1)% на поле уровней с %C(2)% до %C(3)% (учитывая уровень в счетчике: %C(4)%)]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:fld_get_quantity_of_chips($(1), $(2), $(3), $(4))]]; - } - - jsle:enum "CLIENTSTAT" - { - values = - { - -- TODO: Support commented out variants? - { [CST.SPELL_USE] = [[исп. спеллов]] }; - --{ [CST.SPELL_FRAG] = [[фраги от спеллов]] }; - { [CST.CONSUMABLE_USE] = [[исп. расходников]] }; - --{ [CST.CONSUMABLE_FRAG] = [[фраги от расходников]] }; - { [CST.AUTOABILITY_USE] = [[исп. 
автоабилок]] }; - --{ [CST.AUTOABILITY_FRAG] = [[фраги от автоабилок]] }; - --{ [CST.RATING] = [[рейтинг]] }; - --{ [CST.CUSTOM] = [[пользовательская]] }; - }; - html = [[%VALUE()%]]; - checker = check_mapping_tonumber; - handler = get_value_tonumber; - } - - jsle:record "ACT_INCSTAT" - { - children = - { - [1] = "PROPOBJECT"; - [2] = "CLIENTSTAT"; - [3] = "NUMOP_VARIANT"; - [4] = "NUMOP_VARIANT"; - }; - html = [[Увеличить %C(1)% статистику «%C(2)%» эффекта №%C(3)% (0 — текущий) на %C(4)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:inc_client_stat($(1), $(2), $(3), $(4))]]; - } - - jsle:record "ACT_ACTIVATEOT" - { - children = - { - [1] = "NUMOP_VARIANT"; - [2] = { "KEYVALUE_LIST", default = empty_table }; - }; - html = [[Активировать ОТ-эффект №%C(1)%, передав %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:activate_custom_ot_effect($(1),$(2))]]; - } - - jsle:list "CUSTOM_OVERTIME_EFFECTS" - { - type = "OVERTIME_EFFECT"; - html = [[%LE("(Нет дополнительных ОТ-эффектов)")%%LNE("
  1. Дополнительный OT-эффект

    ")%%LIST("
  2. Дополнительный OT-эффект

    ")%%LNE("
")%]]; - checker = no_check; - handler = function(self, node) - local buf = {[[{]]} - local _ = function(v) buf[#buf + 1] = tostring(v) end - for i, child in ipairs(node.value) do - _ [[ -[]] _(i) _[[] = function(self) -]] _(child) _ [[ -end; -]] - end - _ [[}]] - return table.concat(buf) - end; - } - - jsle:record "NUMOP_GETUID" - { - children = - { - [1] = "PROPOBJECT"; - }; - html = [[идентификатор игрока %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:get_uid($(1))]]; - } - - jsle:enum "STORE_OBJ" - { - values = - { - { [SO.CLIENT_SELF] = [[на себе]] }; - { [SO.CLIENT_OPP] = [[на противнике]] }; - { [SO.CLIENT_TARGET] = [[на цели]] }; - { [SO.FIGHT] = [[на бою]] }; - { [SO.GAME] = [[на игре]] }; - }; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value_tonumber; - } - - jsle:record "ACT_SETOBJVAR_LOCAL" - { - children = - { - [1] = "STORE_OBJ"; - [2] = "NUMOP_VARIANT"; - [3] = "NUMOP_VARIANT"; - }; - html = [[Запомнить в объекте «%C(1)%» в слот №%C(2)% приватное значение %C(3)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:setobjvar_local($(1), $(2), $(3))]]; - } - - jsle:record "NUMOP_GETOBJVAR_LOCAL" - { - children = - { - [1] = "STORE_OBJ"; - [2] = "NUMOP_VARIANT"; - }; - html = [[вспомнить из объекта «%C(1)%» из слота №%C(2)% приватное значение]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:getobjvar_local($(1), $(2))]]; - } - - jsle:record "ACT_SETOBJVAR_GLOBAL" - { - children = - { - [1] = "STORE_OBJ"; - [2] = "NUMOP_VARIANT"; - [3] = "NUMOP_VARIANT"; - }; - html = [[Запомнить в объекте %C(1)% в слот №%C(2)% публичное значение %C(3)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:setobjvar_global($(1), $(2), $(3))]]; - } - - jsle:record "NUMOP_GETOBJVAR_GLOBAL" - { - children = - { - [1] = "STORE_OBJ"; - [2] = "NUMOP_VARIANT"; - }; - html = [[вспомнить из объекта %C(1)% из слота №%C(2)% публичное значение]]; - 
checker = no_check; - handler = node_children_placeholders_filler [[self:getobjvar_global($(1), $(2))]]; - } - - jsle:record "ACT_REMOVE_OVERTIMES" - { - children = - { - [1] = "OT_EFFECT_TARGET"; - }; - html = [[Снять все эффекты, наложенные %C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:remove_overtime_effects($(1))]]; - } - - jsle:enum "GAME_MODES" - { - values = - { - { [GMF.ALL] = [[любой]] }; - { [GMF.DUEL] = [[дуэль]] }; - { [GMF.SINGLE] = [[одиночная игра]] }; - }; - html = [[%VALUE()%]]; - checker = no_check; - handler = get_value_tonumber; - } - - jsle:record "BOOLOP_IS_GAME_IN_MODE" - { - children = - { - [1] = "GAME_MODES"; - }; - html = [[игровой режим «%C(1)%» включён]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:is_game_in_mode($(1))]]; - } - - jsle:record "ACT_SETOBJVAR_OT" - { - children = - { - [1] = "NUMOP_VARIANT"; - [2] = "NUMOP_VARIANT"; - }; - html = [[Запомнить в текущем овертайме в слот №%C(1)% значение %C(2)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:setobjvar_ot($(1), $(2))]]; - } - - jsle:record "NUMOP_GETOBJVAR_OT" - { - children = - { - [1] = "NUMOP_VARIANT"; - }; - html = [[Вспомнить из текущего овертайма из слота №%C(1)%]]; - checker = no_check; - handler = node_children_placeholders_filler [[self:getobjvar_ot($(1))]]; - } - - declare_common( - jsle, - "KEYVALUE_LIST", - "KEYVALUE" - ) - - jsle:literal "ACT_CRASH_GAME" - { - html = [[УРОНИТЬ игру (только для теста)]]; - checker = function(self, node) - if common_get_config().crashers_enabled == true then - errr("WARNING: ACT_CRASH_GAME CRASHER IS ON") - return true - end - - errr("DETECTED ATTEMPT TO UPLOAD CRASHERS (SCHEMA)") - return false, "crashers are disabled in config" - end; - handler = invariant [[self:crash_game()]]; - } - - jsle:literal "NUMOP_CRASH_GAME" - { - html = [[УРОНИТЬ игру (только для теста)]]; - checker = function(self, node) - if 
common_get_config().crashers_enabled == true then - errr("WARNING: NUMOP_CRASH_GAME CRASHER IS ON") - return true - end - - errr("DETECTED ATTEMPT TO UPLOAD CRASHERS (SCHEMA)") - return false, "crashers are disabled in config" - end; - handler = invariant [[(self:crash_game() or 0)]]; - } - - return jsle -end - -return -{ - define_schema = define_schema; -} diff --git a/src/tests/reweave/scope.lua b/src/tests/reweave/scope.lua deleted file mode 100644 index f871746..0000000 --- a/src/tests/reweave/scope.lua +++ /dev/null @@ -1,3 +0,0 @@ -do - print("scope") -end diff --git a/src/tests/reweave/str.lua b/src/tests/reweave/str.lua deleted file mode 100644 index bf63ff3..0000000 --- a/src/tests/reweave/str.lua +++ /dev/null @@ -1,2 +0,0 @@ -sample=[==========[perl -e 'print "";' > out]==========] -sample=[==========[perl -e 'print "";' > out]==========] diff --git a/src/tests/reweave/ws_simple.lua b/src/tests/reweave/ws_simple.lua deleted file mode 100644 index 044284a..0000000 --- a/src/tests/reweave/ws_simple.lua +++ /dev/null @@ -1 +0,0 @@ -repeat until true diff --git a/src/tests/run.mlua b/src/tests/run.mlua deleted file mode 100644 index b452056..0000000 --- a/src/tests/run.mlua +++ /dev/null @@ -1,37 +0,0 @@ --- Run all *.lua and *.mlua files in this directory. --- This makes it easy to run all tests in the directory, - --{ extension 'xloop' } - -LS_COMMANDS = { "ls", "dir /b" } -for i, cmd in ipairs(LS_COMMANDS) do - local f = io.popen (cmd) - ls = f :read '*a' - f :close() - if ls ~= '' then - break - elseif i == #LS_COMMANDS then - error "Can't figure out how to list files on your OS" - end -end - -this_script = arg[1] - -local errors = {} - -for filename in ls :gmatch "[^\n]+" if filename ~= this_script and filename :strmatch "%.m?lua$" do - printf ("*** running %s ***", filename) - local ret = os.execute ("metalua "..filename) - if ret ~= 0 then - errors[#errors + 1] = "Test "..filename.." 
failed, returned "..ret - end -end - -if #errors > 0 then - print("\n\n================================================================================") - error( - "TEST FAILURES DETECTED:\n" .. - "-----------------------\n" .. - " * " .. table.concat(errors, "\n * ") - ) -end -- 2.44.0