-rw-r--r--cocoa/ScintillaFramework/ScintillaFramework.xcodeproj/project.pbxproj4
-rw-r--r--curses/Makefile37
-rw-r--r--curses/ScintillaCurses.cxx2
-rw-r--r--curses/jinx/Makefile18
-rw-r--r--curses/jinx/jinx.c10
-rw-r--r--doc/LPegLexer.html2608
-rw-r--r--doc/ScintillaDoc.html3
-rw-r--r--gtk/makefile20
-rw-r--r--include/SciLexer.h1
-rw-r--r--include/Scintilla.iface1
-rw-r--r--lexers/LexLPeg.cxx795
-rw-r--r--lexlua/actionscript.lua59
-rw-r--r--lexlua/ada.lua57
-rw-r--r--lexlua/ansi_c.lua90
-rw-r--r--lexlua/antlr.lua57
-rw-r--r--lexlua/apdl.lua74
-rw-r--r--lexlua/apl.lua57
-rw-r--r--lexlua/applescript.lua69
-rw-r--r--lexlua/asm.lua363
-rw-r--r--lexlua/asp.lua34
-rw-r--r--lexlua/autoit.lua132
-rw-r--r--lexlua/awk.lua297
-rw-r--r--lexlua/bash.lua60
-rw-r--r--lexlua/batch.lua52
-rw-r--r--lexlua/bibtex.lua45
-rw-r--r--lexlua/boo.lua64
-rw-r--r--lexlua/caml.lua62
-rw-r--r--lexlua/chuck.lua72
-rw-r--r--lexlua/cmake.lua140
-rw-r--r--lexlua/coffeescript.lua46
-rw-r--r--lexlua/container.lua5
-rw-r--r--lexlua/context.lua47
-rw-r--r--lexlua/cpp.lua75
-rw-r--r--lexlua/crystal.lua122
-rw-r--r--lexlua/csharp.lua68
-rw-r--r--lexlua/css.lua165
-rw-r--r--lexlua/cuda.lua71
-rw-r--r--lexlua/dart.lua57
-rw-r--r--lexlua/desktop.lua56
-rw-r--r--lexlua/diff.lua32
-rw-r--r--lexlua/django.lua55
-rw-r--r--lexlua/dmd.lua178
-rw-r--r--lexlua/dockerfile.lua41
-rw-r--r--lexlua/dot.lua54
-rw-r--r--lexlua/eiffel.lua60
-rw-r--r--lexlua/elixir.lua107
-rw-r--r--lexlua/erlang.lua89
-rw-r--r--lexlua/faust.lua47
-rw-r--r--lexlua/fish.lua58
-rw-r--r--lexlua/forth.lua56
-rw-r--r--lexlua/fortran.lua72
-rw-r--r--lexlua/fsharp.lua59
-rw-r--r--lexlua/gap.lua42
-rw-r--r--lexlua/gettext.lua31
-rw-r--r--lexlua/gherkin.lua41
-rw-r--r--lexlua/glsl.lua109
-rw-r--r--lexlua/gnuplot.lua59
-rw-r--r--lexlua/go.lua62
-rw-r--r--lexlua/groovy.lua69
-rw-r--r--lexlua/gtkrc.lua58
-rw-r--r--lexlua/haskell.lua45
-rw-r--r--lexlua/html.lua149
-rw-r--r--lexlua/html2.lua147
-rw-r--r--lexlua/icon.lua61
-rw-r--r--lexlua/idl.lua51
-rw-r--r--lexlua/inform.lua72
-rw-r--r--lexlua/ini.lua43
-rw-r--r--lexlua/io_lang.lua51
-rw-r--r--lexlua/java.lua66
-rw-r--r--lexlua/javascript.lua50
-rw-r--r--lexlua/json.lua39
-rw-r--r--lexlua/jsp.lua20
-rw-r--r--lexlua/latex.lua58
-rw-r--r--lexlua/ledger.lua48
-rw-r--r--lexlua/less.lua21
-rw-r--r--lexlua/lexer.lua1865
-rw-r--r--lexlua/lexer2.lua1723
-rw-r--r--lexlua/lilypond.lua31
-rw-r--r--lexlua/lisp.lua65
-rw-r--r--lexlua/litcoffee.lua22
-rw-r--r--lexlua/logtalk.lua35
-rw-r--r--lexlua/lua.lua159
-rw-r--r--lexlua/makefile.lua90
-rw-r--r--lexlua/man.lua29
-rw-r--r--lexlua/markdown.lua102
-rw-r--r--lexlua/matlab.lua86
-rw-r--r--lexlua/moonscript.lua141
-rw-r--r--lexlua/mumps.lua112
-rw-r--r--lexlua/myrddin.lua54
-rw-r--r--lexlua/nemerle.lua66
-rw-r--r--lexlua/nim.lua101
-rw-r--r--lexlua/nsis.lua146
-rw-r--r--lexlua/null.lua4
-rw-r--r--lexlua/objective_c.lua71
-rw-r--r--lexlua/pascal.lua62
-rw-r--r--lexlua/perl.lua142
-rw-r--r--lexlua/php.lua75
-rw-r--r--lexlua/pico8.lua39
-rw-r--r--lexlua/pike.lua56
-rw-r--r--lexlua/pkgbuild.lua79
-rw-r--r--lexlua/powershell.lua63
-rw-r--r--lexlua/prolog.lua129
-rw-r--r--lexlua/props.lua33
-rw-r--r--lexlua/protobuf.lua45
-rw-r--r--lexlua/ps.lua47
-rw-r--r--lexlua/ps.lua.orig167
-rw-r--r--lexlua/pure.lua50
-rw-r--r--lexlua/python.lua104
-rw-r--r--lexlua/rails.lua54
-rw-r--r--lexlua/rc.lua54
-rw-r--r--lexlua/rebol.lua98
-rw-r--r--lexlua/rest.lua259
-rw-r--r--lexlua/rexx.lua76
-rw-r--r--lexlua/rhtml.lua20
-rw-r--r--lexlua/rstats.lua42
-rw-r--r--lexlua/ruby.lua132
-rw-r--r--lexlua/rust.lua68
-rw-r--r--lexlua/sass.lua24
-rw-r--r--lexlua/scala.lua61
-rw-r--r--lexlua/scheme.lua80
-rw-r--r--lexlua/smalltalk.lua46
-rw-r--r--lexlua/sml.lua113
-rw-r--r--lexlua/snobol4.lua65
-rw-r--r--lexlua/sql.lua59
-rw-r--r--lexlua/taskpaper.lua60
-rw-r--r--lexlua/tcl.lua49
-rw-r--r--lexlua/template.txt38
-rw-r--r--lexlua/tex.lua34
-rw-r--r--lexlua/texinfo.lua222
-rw-r--r--lexlua/text.lua4
-rw-r--r--lexlua/themes/curses.lua55
-rw-r--r--lexlua/themes/dark.lua89
-rw-r--r--lexlua/themes/light.lua89
-rw-r--r--lexlua/themes/scite.lua53
-rw-r--r--lexlua/toml.lua53
-rw-r--r--lexlua/vala.lua60
-rw-r--r--lexlua/vb.lua53
-rw-r--r--lexlua/vbscript.lua53
-rw-r--r--lexlua/vcard.lua101
-rw-r--r--lexlua/verilog.lua86
-rw-r--r--lexlua/vhdl.lua69
-rw-r--r--lexlua/wsf.lua101
-rw-r--r--lexlua/xml.lua88
-rw-r--r--lexlua/xtend.lua90
-rw-r--r--lexlua/yaml.lua120
-rw-r--r--lua/LICENSE19
-rw-r--r--lua/README7
-rw-r--r--lua/doc/lua.css164
-rw-r--r--lua/doc/manual.css21
-rw-r--r--lua/doc/manual.html10985
-rw-r--r--lua/src/lapi.c1298
-rw-r--r--lua/src/lapi.h24
-rw-r--r--lua/src/lauxlib.c1043
-rw-r--r--lua/src/lauxlib.h264
-rw-r--r--lua/src/lbaselib.c498
-rw-r--r--lua/src/lbitlib.c233
-rw-r--r--lua/src/lcode.c1203
-rw-r--r--lua/src/lcode.h88
-rw-r--r--lua/src/lcorolib.c168
-rw-r--r--lua/src/lctype.c55
-rw-r--r--lua/src/lctype.h95
-rw-r--r--lua/src/ldblib.c456
-rw-r--r--lua/src/ldebug.c698
-rw-r--r--lua/src/ldebug.h39
-rw-r--r--lua/src/ldo.c802
-rw-r--r--lua/src/ldo.h58
-rw-r--r--lua/src/ldump.c215
-rw-r--r--lua/src/lfunc.c151
-rw-r--r--lua/src/lfunc.h61
-rw-r--r--lua/src/lgc.c1178
-rw-r--r--lua/src/lgc.h147
-rw-r--r--lua/src/linit.c68
-rw-r--r--lua/src/liolib.c771
-rw-r--r--lua/src/llex.c565
-rw-r--r--lua/src/llex.h85
-rw-r--r--lua/src/llimits.h323
-rw-r--r--lua/src/lmathlib.c410
-rw-r--r--lua/src/lmem.c100
-rw-r--r--lua/src/lmem.h69
-rw-r--r--lua/src/loadlib.c790
-rw-r--r--lua/src/lobject.c521
-rw-r--r--lua/src/lobject.h549
-rw-r--r--lua/src/lopcodes.c124
-rw-r--r--lua/src/lopcodes.h297
-rw-r--r--lua/src/loslib.c407
-rw-r--r--lua/src/lparser.c1650
-rw-r--r--lua/src/lparser.h133
-rw-r--r--lua/src/lpcap.c537
-rw-r--r--lua/src/lpcap.h43
-rw-r--r--lua/src/lpcode.c986
-rw-r--r--lua/src/lpcode.h42
-rw-r--r--lua/src/lpprint.c244
-rw-r--r--lua/src/lpprint.h36
-rw-r--r--lua/src/lprefix.h45
-rw-r--r--lua/src/lptree.c1296
-rw-r--r--lua/src/lptree.h77
-rw-r--r--lua/src/lptypes.h149
-rw-r--r--lua/src/lpvm.c355
-rw-r--r--lua/src/lpvm.h58
-rw-r--r--lua/src/lstate.c347
-rw-r--r--lua/src/lstate.h235
-rw-r--r--lua/src/lstring.c248
-rw-r--r--lua/src/lstring.h49
-rw-r--r--lua/src/lstrlib.c1584
-rw-r--r--lua/src/ltable.c669
-rw-r--r--lua/src/ltable.h66
-rw-r--r--lua/src/ltablib.c450
-rw-r--r--lua/src/ltm.c165
-rw-r--r--lua/src/ltm.h76
-rw-r--r--lua/src/lua.h486
-rw-r--r--lua/src/lua.hpp9
-rw-r--r--lua/src/luaconf.h783
-rw-r--r--lua/src/lualib.h61
-rw-r--r--lua/src/lundump.c279
-rw-r--r--lua/src/lundump.h32
-rw-r--r--lua/src/lutf8lib.c256
-rw-r--r--lua/src/lvm.c1322
-rw-r--r--lua/src/lvm.h113
-rw-r--r--lua/src/lzio.c68
-rw-r--r--lua/src/lzio.h66
-rw-r--r--src/Catalogue.cxx1
-rw-r--r--win32/makefile19
-rw-r--r--win32/scintilla.mak3
223 files changed, 55163 insertions, 30 deletions
diff --git a/cocoa/ScintillaFramework/ScintillaFramework.xcodeproj/project.pbxproj b/cocoa/ScintillaFramework/ScintillaFramework.xcodeproj/project.pbxproj
index 309416ec6..0a8ea2723 100644
--- a/cocoa/ScintillaFramework/ScintillaFramework.xcodeproj/project.pbxproj
+++ b/cocoa/ScintillaFramework/ScintillaFramework.xcodeproj/project.pbxproj
@@ -224,6 +224,7 @@
8DC2EF530486A6940098B216 /* InfoPlist.strings in Resources */ = {isa = PBXBuildFile; fileRef = 089C1666FE841158C02AAC07 /* InfoPlist.strings */; };
8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1058C7B1FEA5585E11CA2CBB /* Cocoa.framework */; };
F437405F9F32C7DEFCA38C11 /* LexIndent.cxx in Sources */ = {isa = PBXBuildFile; fileRef = 282E41F3B9E2BFEDD6A05BE7 /* LexIndent.cxx */; };
+ 4D0C4365AB6DF998CD48B1FC /* LexLPeg.cxx in Sources */ = {isa = PBXBuildFile; fileRef = 5EB3467789767C0ACE40A46A /* LexLPeg.cxx */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
@@ -449,6 +450,7 @@
8DC2EF5B0486A6940098B216 /* Scintilla.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Scintilla.framework; sourceTree = BUILT_PRODUCTS_DIR; };
D2F7E79907B2D74100F64583 /* CoreData.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreData.framework; path = /System/Library/Frameworks/CoreData.framework; sourceTree = "<absolute>"; };
282E41F3B9E2BFEDD6A05BE7 /* LexIndent.cxx */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = LexIndent.cxx; path = ../../lexers/LexIndent.cxx; sourceTree = SOURCE_ROOT; };
+ 5EB3467789767C0ACE40A46A /* LexLPeg.cxx */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = LexLPeg.cxx; path = ../../lexers/LexLPeg.cxx; sourceTree = SOURCE_ROOT; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -598,6 +600,7 @@
1102C31B169FB49300DC16AB /* LexLaTeX.cxx */,
114B6EE011FA7526004FB6AB /* LexLisp.cxx */,
114B6EE111FA7526004FB6AB /* LexLout.cxx */,
+ 5EB3467789767C0ACE40A46A /* LexLPeg.cxx */,
114B6EE211FA7526004FB6AB /* LexLua.cxx */,
114B6EE311FA7526004FB6AB /* LexMagik.cxx */,
28B647091B54C0720009DC49 /* LexMake.cxx */,
@@ -1096,6 +1099,7 @@
1160E0381803651C00BCEBCB /* LexRust.cxx in Sources */,
11FF3FE21810EB3900E13F13 /* LexDMAP.cxx in Sources */,
F437405F9F32C7DEFCA38C11 /* LexIndent.cxx in Sources */,
+ 4D0C4365AB6DF998CD48B1FC /* LexLPeg.cxx in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
diff --git a/curses/Makefile b/curses/Makefile
index 42a8e4e0c..77bfd14dd 100644
--- a/curses/Makefile
+++ b/curses/Makefile
@@ -3,10 +3,11 @@
.SUFFIXES: .cxx .c .o .h .a
AR = ar
+CC = gcc
CXX = g++
INCLUDEDIRS = -I ../include -I ../src -I ../lexlib
-CXXFLAGS = -std=c++11 -pedantic -DCURSES -DSCI_LEXER $(INCLUDEDIRS) -Wall \
- -Wextra -Wno-missing-field-initializers
+CFLAGS = -std=c99 -pedantic -Wall
+CXXFLAGS = -std=c++11 -pedantic -DCURSES -DSCI_LEXER $(INCLUDEDIRS) -Wall
ifdef DEBUG
CXXFLAGS += -DDEBUG -g
else
@@ -15,23 +16,35 @@ endif
CURSES_FLAGS =
scintilla = ../bin/scintilla.a
+sci = AutoComplete.o CallTip.o CaseConvert.o CaseFolder.o Catalogue.o \
+ CellBuffer.o CharacterCategory.o CharClassify.o ContractionState.o \
+ Decoration.o Document.o EditModel.o Editor.o EditView.o ExternalLexer.o \
+ Indicator.o KeyMap.o LineMarker.o MarginView.o PerLine.o PositionCache.o \
+ RESearch.o RunStyles.o ScintillaBase.o Selection.o Style.o \
+ UniConversion.o ViewStyle.o XPM.o \
+ Accessor.o CharacterSet.o LexerBase.o LexerModule.o LexerNoExceptions.o \
+ LexerSimple.o PropSetSimple.o StyleContext.o WordList.o
lexers = $(addsuffix .o,$(basename $(sort $(notdir $(wildcard ../lexers/Lex*.cxx)))))
+ifdef LPEG_LEXER
+ CXXFLAGS += -DLPEG_LEXER -I ../lua/src
+ LUA_CFLAGS = -I ../lua/src -DLUA_USE_POSIX -DLUA_USE_DLOPEN
+ lua = lapi.o lcode.o lctype.o ldebug.o ldo.o ldump.o lfunc.o lgc.o linit.o \
+ llex.o lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o \
+ ltable.o ltm.o lundump.o lvm.o lzio.o \
+ lauxlib.o lbaselib.o lbitlib.o lcorolib.o ldblib.o liolib.o lmathlib.o \
+ loadlib.o loslib.o lstrlib.o ltablib.o lutf8lib.o \
+ lpcap.o lpcode.o lpprint.o lptree.o lpvm.o
+endif
vpath %.h ../src ../include ../lexlib
vpath %.cxx ../src ../lexlib ../lexers
all: $(scintilla)
-.cxx.o:
+$(sci) $(lexers) ScintillaCurses.o: %.o: %.cxx
$(CXX) $(CXXFLAGS) $(CURSES_FLAGS) -c $<
-$(scintilla): AutoComplete.o CallTip.o CaseConvert.o CaseFolder.o Catalogue.o \
- CellBuffer.o CharacterCategory.o CharClassify.o \
- ContractionState.o Decoration.o Document.o EditModel.o Editor.o \
- EditView.o ExternalLexer.o Indicator.o KeyMap.o LineMarker.o \
- MarginView.o PerLine.o PositionCache.o RESearch.o RunStyles.o \
- ScintillaBase.o Selection.o Style.o UniConversion.o ViewStyle.o \
- XPM.o Accessor.o CharacterSet.o LexerBase.o LexerModule.o \
- LexerNoExceptions.o LexerSimple.o PropSetSimple.o StyleContext.o \
- WordList.o $(lexers) ScintillaCurses.o
+$(lua): %.o: ../lua/src/%.c
+ $(CC) $(CFLAGS) $(LUA_CFLAGS) -c $<
+$(scintilla): $(sci) $(lexers) $(lua) ScintillaCurses.o
$(AR) rc $@ $^
touch $@
clean:
diff --git a/curses/ScintillaCurses.cxx b/curses/ScintillaCurses.cxx
index 3e78f1cca..8d0c99d47 100644
--- a/curses/ScintillaCurses.cxx
+++ b/curses/ScintillaCurses.cxx
@@ -80,7 +80,7 @@ Font::~Font() {}
* The curses attributes are not constructed from various fields in *fp* since
* there is no `underline` parameter. Instead, you need to manually set the
* `weight` parameter to be the union of your desired attributes.
- * Scintillua (http://foicica.com/scintillua) has an example of this.
+ * Scintilla's lexers/LexLPeg.cxx has an example of this.
*/
void Font::Create(const FontParameters &fp) {
Release();
diff --git a/curses/jinx/Makefile b/curses/jinx/Makefile
index 6811d4f7e..cd34e14cf 100644
--- a/curses/jinx/Makefile
+++ b/curses/jinx/Makefile
@@ -3,17 +3,15 @@
CC = gcc
CXX = g++
INCLUDEDIRS = -I ../../include -I ../../src -I ../../lexlib -I ../
-CFLAGS = -DCURSES -DSCI_LEXER -D_XOPEN_SOURCE_EXTENDED -W -Wall $(INCLUDEDIRS) \
- -Wno-unused-parameter
+CFLAGS = -DCURSES -DSCI_LEXER -Wall $(INCLUDEDIRS)
CXXFLAGS = $(CFLAGS)
+ifdef LPEG_LEXER
+ CFLAGS += -DLPEG_LEXER -I ../src/lua
+endif
-scintilla = ../../bin/scintilla_curses.a
-lexers = $(wildcard ../Lex*.o)
+scintilla = ../../bin/scintilla.a
all: jinx
-jinx.o: jinx.c
- $(CC) $(CFLAGS) -c $<
-jinx: jinx.o $(lexers) $(scintilla)
- $(CXX) -DCURSES $^ -o $@ -lncursesw
-clean:
- rm -f jinx *.o
+jinx.o: jinx.c ; $(CC) $(CFLAGS) -c $<
+jinx: jinx.o $(scintilla) ; $(CXX) $^ -o $@ -lncursesw
+clean: ; rm -f jinx *.o
diff --git a/curses/jinx/jinx.c b/curses/jinx/jinx.c
index 83a955cb4..bea88f67f 100644
--- a/curses/jinx/jinx.c
+++ b/curses/jinx/jinx.c
@@ -24,6 +24,7 @@ int main(int argc, char **argv) {
SSM(SCI_STYLESETFORE, STYLE_DEFAULT, 0xFFFFFF);
SSM(SCI_STYLESETBACK, STYLE_DEFAULT, 0);
SSM(SCI_STYLECLEARALL, 0, 0);
+#if !LPEG_LEXER
SSM(SCI_SETLEXER, SCLEX_CPP, 0);
SSM(SCI_SETKEYWORDS, 0, (sptr_t)"int char");
SSM(SCI_STYLESETFORE, SCE_C_COMMENT, 0x00FF00);
@@ -32,6 +33,15 @@ int main(int argc, char **argv) {
SSM(SCI_STYLESETFORE, SCE_C_WORD, 0xFF0000);
SSM(SCI_STYLESETFORE, SCE_C_STRING, 0xFF00FF);
SSM(SCI_STYLESETBOLD, SCE_C_OPERATOR, 1);
+#else
+ SSM(SCI_SETLEXER, SCLEX_LPEG, 0);
+ SSM(SCI_SETPROPERTY, (uptr_t)"lexer.lpeg.home", (sptr_t)"../../lexlua");
+ SSM(SCI_SETPROPERTY, (uptr_t)"lexer.lpeg.color.theme", (sptr_t)"curses");
+ SSM(SCI_PRIVATELEXERCALL, SCI_GETDIRECTFUNCTION,
+ SSM(SCI_GETDIRECTFUNCTION, 0, 0));
+ SSM(SCI_PRIVATELEXERCALL, SCI_SETDOCPOINTER, SSM(SCI_GETDIRECTPOINTER, 0, 0));
+ SSM(SCI_PRIVATELEXERCALL, SCI_SETLEXERLANGUAGE, (sptr_t)"ansi_c");
+#endif
SSM(SCI_INSERTTEXT, 0, (sptr_t)
"int main(int argc, char **argv) {\n"
" // Start up the gnome\n"
diff --git a/doc/LPegLexer.html b/doc/LPegLexer.html
new file mode 100644
index 000000000..1a0049799
--- /dev/null
+++ b/doc/LPegLexer.html
@@ -0,0 +1,2608 @@
+<?xml version="1.0"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Lua LPeg Lexers</title>
+
+ <style type="text/css">
+ <!--
+ /*<![CDATA[*/
+ CODE { font-weight: bold; font-family: Menlo,Consolas,Bitstream Vera Sans Mono,Courier New,monospace; }
+ A:visited { color: blue; }
+ A:hover { text-decoration: underline ! important; }
+ A.message { text-decoration: none; font-weight: bold; font-family: Menlo,Consolas,Bitstream Vera Sans Mono,Courier New,monospace; }
+ A.seealso { text-decoration: none; font-weight: bold; font-family: Menlo,Consolas,Bitstream Vera Sans Mono,Courier New,monospace; }
+ A.toc { text-decoration: none; }
+ A.jump { text-decoration: none; }
+ LI.message { text-decoration: none; font-weight: bold; font-family: Menlo,Consolas,Bitstream Vera Sans Mono,Courier New,monospace; }
+ H2 { background: #E0EAFF; }
+
+ table {
+ border: 0px;
+ border-collapse: collapse;
+ }
+
+ table.categories {
+ border: 0px;
+ border-collapse: collapse;
+ }
+ table.categories td {
+ padding: 4px 12px;
+ }
+
+ table.standard {
+ border-collapse: collapse;
+ }
+ table.standard th {
+ background: #404040;
+ color: #FFFFFF;
+ padding: 1px 5px 1px 5px;
+ }
+ table.standard tr:nth-child(odd) {background: #D7D7D7}
+ table.standard tr:nth-child(even) {background: #F0F0F0}
+ table.standard td {
+ padding: 1px 5px 1px 5px;
+ }
+
+ .S0 {
+ color: #808080;
+ }
+ .S2 {
+ font-family: 'Comic Sans MS';
+ color: #007F00;
+ font-size: 9pt;
+ }
+ .S3 {
+ font-family: 'Comic Sans MS';
+ color: #3F703F;
+ font-size: 9pt;
+ }
+ .S4 {
+ color: #007F7F;
+ }
+ .S5 {
+ font-weight: bold;
+ color: #00007F;
+ }
+ .S9 {
+ color: #7F7F00;
+ }
+ .S10 {
+ font-weight: bold;
+ color: #000000;
+ }
+ .S17 {
+ font-family: 'Comic Sans MS';
+ color: #3060A0;
+ font-size: 9pt;
+ }
+ DIV.highlighted {
+ background: #F7FCF7;
+ border: 1px solid #C0D7C0;
+ margin: 0.3em 3em;
+ padding: 0.3em 0.6em;
+ font-family: 'Verdana';
+ color: #000000;
+ font-size: 10pt;
+ }
+ .provisional {
+ background: #FFB000;
+ }
+ .parameter {
+ font-style:italic;
+ }
+ /*]]>*/
+ -->
+ </style>
+ </head>
+
+ <body bgcolor="#FFFFFF" text="#000000">
+ <table bgcolor="#000000" width="100%" cellspacing="0" cellpadding="0" border="0"
+ summary="Banner">
+ <tr>
+ <td><img src="SciTEIco.png" border="3" height="64" width="64" alt="Scintilla icon" /></td>
+
+ <td><a href="index.html"
+ style="color:white;text-decoration:none;font-size:200%">Scintilla</a></td>
+ </tr>
+ </table>
+
+ <h1>Lua LPeg Lexers</h1>
+
+ <p>Scintilla's LPeg lexer adds dynamic <a href="http://lua.org">Lua</a>
+ <a href="http://www.inf.puc-rio.br/~roberto/lpeg/">LPeg</a> lexers to
+ Scintilla. It is the quickest way to add new or customized syntax
+ highlighting and code folding for programming languages to any
+ Scintilla-based text editor or IDE.</p>
+
+ <h2>Features</h2>
+
+ <ul>
+ <li>Support for <a href="#LexerList">over 100 programming languages</a>.</li>
+ <li>Easy lexer embedding for multi-language lexers.</li>
+ <li>Universal color themes.</li>
+ <li>Comparable speed to native Scintilla lexers.</li>
+ </ul>
+
+ <h2>Enabling and Configuring the LPeg Lexer</h2>
+
+ <p>Scintilla is <em>not</em> compiled with the LPeg lexer enabled by
+ default (it is present, but empty). You need to manually enable it with the
+ <code>LPEG_LEXER</code> flag when building Scintilla and its lexers. You
+ also need to build and link the Lua source files contained in Scintilla's
+ <code>lua/src/</code> directory to <code>lexers/LexLPeg.cxx</code>. If your
+ application has its own copy of Lua, you can ignore Scintilla's copy and
+    link to yours.</p>
+
+ <p>At this time, only the GTK, curses, and MinGW32 (for win32) platform
+ makefiles facilitate enabling the LPeg lexer. For example, when building
+ Scintilla, run <code>make LPEG_LEXER=1</code>. User contributions to
+    facilitate this for the other platforms are encouraged.</p>
+
+ <p>When Scintilla is compiled with the LPeg lexer enabled, and after
+ selecting it as the lexer to use via
+ <a class="message" href="ScintillaDoc.html#SCI_SETLEXER">SCI_SETLEXER</a> or
+ <a class="message" href="ScintillaDoc.html#SCI_SETLEXERLANGUAGE">SCI_SETLEXERLANGUAGE</a>,
+ the following property <em>must</em> be set via
+ <a class="message" href="ScintillaDoc.html#SCI_SETPROPERTY">SCI_SETPROPERTY</a>:</p>
+
+ <table class="standard" summary="Search flags">
+ <tbody>
+ <tr>
+ <td><code>lexer.lpeg.home</code></td>
+
+          <td>The directory containing the Lua lexers. This is the path
+          where you installed Scintilla's <code>lexlua/</code> directory
+          alongside your application.</td>
+ </tr>
+ </tbody>
+ </table>
+
+ <p>The following properties are optional and may or may not be set:</p>
+
+ <table class="standard" summary="Search flags">
+ <tbody>
+ <tr>
+ <td><code>lexer.lpeg.color.theme</code></td>
+
+ <td>The color theme to use. Color themes are located in the
+ <code>lexlua/themes/</code> directory. Currently supported themes
+ are <code>light</code>, <code>dark</code>, <code>scite</code>, and
+ <code>curses</code>. Your application can define colors and styles
+ manually through Scintilla properties. The theme files have
+ examples.</td>
+ </tr>
+
+ <tr>
+ <td><code>fold</code></td>
+
+ <td>For Lua lexers that have a folder, folding is turned on if
+ <code>fold</code> is set to <code>1</code>. The default is
+ <code>0</code>.</td>
+ </tr>
+
+ <tr>
+          <td><code>fold.by.indentation</code></td>
+
+ <td>For Lua lexers that do not have a folder, if
+ <code>fold.by.indentation</code> is set to <code>1</code>, folding is
+ done based on indentation level (like Python). The default is
+ <code>0</code>.</td>
+ </tr>
+
+ <tr>
+ <td><code>fold.line.comments</code></td>
+
+ <td>If <code>fold.line.comments</code> is set to <code>1</code>,
+ multiple, consecutive line comments are folded, and only the top-level
+ comment is shown. There is a small performance penalty for large
+ source files when this option and folding are enabled. The default is
+ <code>0</code>.</td>
+ </tr>
+
+ <tr>
+ <td><code>fold.on.zero.sum.lines</code></td>
+
+ <td>If <code>fold.on.zero.sum.lines</code> is set to <code>1</code>,
+ lines that contain both an ending and starting fold point are marked
+ as fold points. For example, the C line <code>} else {</code> would be
+ marked as a fold point. The default is <code>0</code>.</td>
+ </tr>
+ </tbody>
+ </table>
+
+ <h2>Using the LPeg Lexer</h2>
+
+ <p>Your application communicates with the LPeg lexer using Scintilla's
+ <a class="message" href="ScintillaDoc.html#SCI_PRIVATELEXERCALL"><code>SCI_PRIVATELEXERCALL</code></a>
+ API. The operation constants recognized by the LPeg lexer are based on
+ Scintilla's existing named constants. Note that some of the names of the
+ operations do not make perfect sense. This is a tradeoff in order to reuse
+ Scintilla's existing constants.</p>
+
+ <p>In the descriptions that follow,
+ <code>SCI_PRIVATELEXERCALL(int operation, void *pointer)</code> means you
+ would call Scintilla like
+ <code>SendScintilla(sci, SCI_PRIVATELEXERCALL, operation, pointer);</code></p>
+
+ <h3>Usage Example</h3>
+
+ <p>The curses platform demo, jinx, has a C-source example for using the LPeg
+ lexer. Additionally, here is a pseudo-code example:</p>
+
+ <pre><code>
+ init_app() {
+ sci = scintilla_new()
+ }
+
+ create_doc() {
+ doc = SendScintilla(sci, SCI_CREATEDOCUMENT, 0, 0)
+ SendScintilla(sci, SCI_SETDOCPOINTER, 0, doc)
+ SendScintilla(sci, SCI_SETLEXERLANGUAGE, 0, "lpeg")
+ home = "/home/mitchell/app/lua_lexers"
+ SendScintilla(sci, SCI_SETPROPERTY, "lexer.lpeg.home", home)
+ SendScintilla(sci, SCI_SETPROPERTY, "lexer.lpeg.color.theme", "light")
+ fn = SendScintilla(sci, SCI_GETDIRECTFUNCTION, 0, 0)
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_GETDIRECTFUNCTION, fn)
+ psci = SendScintilla(sci, SCI_GETDIRECTPOINTER, 0, 0)
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_SETDOCPOINTER, psci)
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_SETLEXERLANGUAGE, "lua")
+ }
+
+ set_lexer(lang) {
+ psci = SendScintilla(sci, SCI_GETDIRECTPOINTER, 0, 0)
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_SETDOCPOINTER, psci)
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_SETLEXERLANGUAGE, lang)
+ }
+ </code></pre>
+
+ <code><a class="message" href="#SCI_CHANGELEXERSTATE">SCI_PRIVATELEXERCALL(SCI_CHANGELEXERSTATE, lua_State *L)</a><br/>
+ <a class="message" href="#SCI_GETDIRECTFUNCTION">SCI_PRIVATELEXERCALL(SCI_GETDIRECTFUNCTION, int SciFnDirect)</a><br/>
+ <a class="message" href="#SCI_GETLEXERLANGUAGE">SCI_PRIVATELEXERCALL(SCI_GETLEXERLANGUAGE, char *languageName) &rarr; int</a><br/>
+ <a class="message" href="#SCI_GETSTATUS">SCI_PRIVATELEXERCALL(SCI_GETSTATUS, char *errorMessage) &rarr; int</a><br/>
+ <a class="message" href="#styleNum">SCI_PRIVATELEXERCALL(int styleNum, char *styleName) &rarr; int</a><br/>
+ <a class="message" href="#SCI_SETDOCPOINTER">SCI_PRIVATELEXERCALL(SCI_SETDOCPOINTER, int sci)</a><br/>
+ <a class="message" href="#SCI_SETLEXERLANGUAGE">SCI_PRIVATELEXERCALL(SCI_SETLEXERLANGUAGE, languageName)</a><br/>
+ </code>
+
+ <p><b id="SCI_CHANGELEXERSTATE">SCI_PRIVATELEXERCALL(SCI_CHANGELEXERSTATE, lua_State *L)</b><br/>
+ Tells the LPeg lexer to use <code>L</code> as its Lua state instead of
+ creating a separate state.</p>
+
+ <p><code>L</code> must have already opened the "base", "string", "table",
+ "package", and "lpeg" libraries. If <code>L</code> is a Lua 5.1 state, it
+ must have also opened the "io" library.</p>
+
+ <p>The LPeg lexer will create a single <code>lexer</code> package (that can
+ be used with Lua's <code>require</code> function), as well as a number of
+ other variables in the <code>LUA_REGISTRYINDEX</code> table with the "sci_"
+ prefix.</p>
+
+ <p>Rather than including the path to Scintilla's Lua lexers in the
+ <code>package.path</code> of the given Lua state, set the "lexer.lpeg.home"
+ property instead. The LPeg lexer uses that property to find and load
+ lexers.</p>
+
+ <p>Usage:</p>
+
+ <pre><code>
+ lua = luaL_newstate()
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_CHANGELEXERSTATE, lua)
+ </code></pre>
+
+ <p><b id="SCI_GETDIRECTFUNCTION">SCI_PRIVATELEXERCALL(SCI_GETDIRECTFUNCTION, SciFnDirect)</b><br/>
+ Tells the LPeg lexer the address of <code>SciFnDirect</code>, the function
+ that handles Scintilla messages.</p>
+
+    <p>Despite the name <code>SCI_GETDIRECTFUNCTION</code>, this call only tells the
+    LPeg lexer the value of <code>SciFnDirect</code> obtained from
+    <a class="message" href="ScintillaDoc.html#SCI_GETDIRECTFUNCTION"><code>SCI_GETDIRECTFUNCTION</code></a>;
+    it does not return anything. Use this if you would like to have the LPeg
+ lexer set all Lua lexer styles automatically. This is useful for maintaining
+ a consistent color theme. Do not use this if your application maintains its
+ own color theme.</p>
+
+ <p>If you use this call, it <em>must</em> be made <em>once</em> for each
+ Scintilla document that was created using Scintilla's
+ <a class="message" href="ScintillaDoc.html#SCI_CREATEDOCUMENT"><code>SCI_CREATEDOCUMENT</code></a>.
+ You must also use the
+ <a class="message" href="#SCI_SETDOCPOINTER"><code>SCI_SETDOCPOINTER</code></a> LPeg lexer
+ API call.</p>
+
+ <p>Usage:</p>
+
+ <pre><code>
+ fn = SendScintilla(sci, SCI_GETDIRECTFUNCTION, 0, 0)
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_GETDIRECTFUNCTION, fn)
+ </code></pre>
+
+ <p>See also: <a class="message" href="#SCI_SETDOCPOINTER"><code>SCI_SETDOCPOINTER</code></a></p>
+
+ <p><b id="SCI_GETLEXERLANGUAGE">SCI_PRIVATELEXERCALL(SCI_GETLEXERLANGUAGE, char *languageName) &rarr; int</b><br/>
+ Returns the length of the string name of the current Lua lexer or stores the
+ name into the given buffer. If the buffer is long enough, the name is
+ terminated by a <code>0</code> character.</p>
+
+ <p>For parent lexers with embedded children or child lexers embedded into
+ parents, the name is in "lexer/current" format, where "lexer" is the actual
+ lexer's name and "current" is the parent or child lexer at the current caret
+ position. In order for this to work, you must have called
+ <a class="message" href="#SCI_GETDIRECTFUNCTION"><code>SCI_GETDIRECTFUNCTION</code></a>
+ and
+ <a class="message" href="#SCI_SETDOCPOINTER"><code>SCI_SETDOCPOINTER</code></a>.</p>
+
+ <p><b id="SCI_GETSTATUS">SCI_PRIVATELEXERCALL(SCI_GETSTATUS, char *errorMessage) &rarr; int</b><br/>
+ Returns the length of the error message of the LPeg lexer or Lua lexer error
+ that occurred (if any), or stores the error message into the given buffer.</p>
+
+ <p>If no error occurred, the returned message will be empty.</p>
+
+ <p>Since the LPeg lexer does not throw errors as they occur, errors can only
+ be handled passively. Note that the LPeg lexer does print all errors to
+ stderr.</p>
+
+ <p>Usage:</p>
+
+ <pre><code>
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_GETSTATUS, errmsg)
+ if (strlen(errmsg) &gt; 0) { /* handle error */ }
+ </code></pre>
+
+ <p><b id="SCI_PRIVATELEXERCALL">SCI_PRIVATELEXERCALL(int styleNum, char *styleName) &rarr; int</b><br/>
+ Returns the length of the token name associated with the given style number
+ or stores the style name into the given buffer. If the buffer is long
+ enough, the string is terminated by a <code>0</code> character.</p>
+
+ <p>Usage:</p>
+
+ <pre><code>
+ style = SendScintilla(sci, SCI_GETSTYLEAT, pos, 0)
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, style, token)
+ // token now contains the name of the style at pos
+ </code></pre>
+
+ <p><b id="SCI_SETDOCPOINTER">SCI_PRIVATELEXERCALL(SCI_SETDOCPOINTER, int sci)</b><br/>
+ Tells the LPeg lexer the address of the Scintilla window (obtained via
+ Scintilla's
+ <a class="message" href="ScintillaDoc.html#SCI_GETDIRECTPOINTER"><code>SCI_GETDIRECTPOINTER</code></a>)
+ currently in use.</p>
+
+ <p>Despite the name <code>SCI_SETDOCPOINTER</code>, it has no relationship
+ to Scintilla documents.</p>
+
+ <p>Use this call only if you are using the
+ <a class="message" href="#SCI_GETDIRECTFUNCTION"><code>SCI_GETDIRECTFUNCTION</code></a>
+ LPeg lexer API call. It <em>must</em> be made <em>before</em> each call to
+ the <a class="message" href="#SCI_SETLEXERLANGUAGE"><code>SCI_SETLEXERLANGUAGE</code></a>
+ LPeg lexer API call.</p>
+
+ <p>Usage:</p>
+
+ <pre><code>
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_SETDOCPOINTER, sci)
+ </code></pre>
+
+ <p>See also: <a class="message" href="#SCI_GETDIRECTFUNCTION"><code>SCI_GETDIRECTFUNCTION</code></a>,
+ <a class="message" href="#SCI_SETLEXERLANGUAGE"><code>SCI_SETLEXERLANGUAGE</code></a></p>
+
+ <p><b id="SCI_SETLEXERLANGUAGE">SCI_PRIVATELEXERCALL(SCI_SETLEXERLANGUAGE, const char *languageName)</b><br/>
+ Sets the current Lua lexer to <code>languageName</code>.</p>
+
+ <p>If you are having the LPeg lexer set the Lua lexer styles automatically,
+ make sure you call the
+ <a class="message" href="#SCI_SETDOCPOINTER"><code>SCI_SETDOCPOINTER</code></a>
+ LPeg lexer API <em>first</em>.</p>
+
+ <p>Usage:</p>
+
+ <pre><code>
+ SendScintilla(sci, SCI_PRIVATELEXERCALL, SCI_SETLEXERLANGUAGE, "lua")
+ </code></pre>
+
+ <p>See also: <a class="message" href="#SCI_SETDOCPOINTER"><code>SCI_SETDOCPOINTER</code></a></p>
+
+ <h2 id="lexer">Writing Lua Lexers</h2>
+
+ <p>Lexers highlight the syntax of source code. Scintilla (the editing component
+ behind <a href="http://foicica.com/textadept">Textadept</a>) traditionally uses static, compiled C++
+    lexers, which are notoriously difficult to create and/or extend. On the other
+    hand, <a href="http://lua.org">Lua</a> makes it easy to rapidly create new lexers, extend existing
+ ones, and embed lexers within one another. Lua lexers tend to be more
+ readable than C++ lexers too.</p>
+
+ <p>Lexers are Parsing Expression Grammars, or PEGs, composed with the Lua
+ <a href="http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html">LPeg library</a>. The following table comes from the LPeg documentation and
+ summarizes all you need to know about constructing basic LPeg patterns. This
+ module provides convenience functions for creating and working with other
+ more advanced patterns and concepts.</p>
+
+ <table class="standard">
+ <thead>
+ <tr>
+ <th>Operator </th>
+ <th> Description</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr>
+ <td><code>lpeg.P(string)</code> </td>
+ <td> Matches <code>string</code> literally.</td>
+ </tr>
+ <tr>
+ <td><code>lpeg.P(</code><em><code>n</code></em><code>)</code> </td>
+ <td> Matches exactly <em><code>n</code></em> characters.</td>
+ </tr>
+ <tr>
+ <td><code>lpeg.S(string)</code> </td>
+ <td> Matches any character in set <code>string</code>.</td>
+ </tr>
+ <tr>
+ <td><code>lpeg.R("</code><em><code>xy</code></em><code>")</code> </td>
+ <td> Matches any character between range <code>x</code> and <code>y</code>.</td>
+ </tr>
+ <tr>
+ <td><code>patt^</code><em><code>n</code></em> </td>
+ <td> Matches at least <em><code>n</code></em> repetitions of <code>patt</code>.</td>
+ </tr>
+ <tr>
+ <td><code>patt^-</code><em><code>n</code></em> </td>
+ <td> Matches at most <em><code>n</code></em> repetitions of <code>patt</code>.</td>
+ </tr>
+ <tr>
+ <td><code>patt1 * patt2</code> </td>
+ <td> Matches <code>patt1</code> followed by <code>patt2</code>.</td>
+ </tr>
+ <tr>
+ <td><code>patt1 + patt2</code> </td>
+ <td> Matches <code>patt1</code> or <code>patt2</code> (ordered choice).</td>
+ </tr>
+ <tr>
+ <td><code>patt1 - patt2</code> </td>
+ <td> Matches <code>patt1</code> if <code>patt2</code> does not match.</td>
+ </tr>
+ <tr>
+ <td><code>-patt</code> </td>
+ <td> Equivalent to <code>("" - patt)</code>.</td>
+ </tr>
+ <tr>
+ <td><code>#patt</code> </td>
+ <td> Matches <code>patt</code> but consumes no input.</td>
+ </tr>
+ </tbody>
+ </table>
+
+
+ <p>The first part of this document deals with rapidly constructing a simple
+ lexer. The next part deals with more advanced techniques, such as custom
+ coloring and embedding lexers within one another. Following that is a
+ discussion about code folding, or being able to tell Scintilla which code
+ blocks are "foldable" (temporarily hideable from view). After that are
+ instructions on how to use Lua lexers with the aforementioned Textadept
+ editor. Finally there are comments on lexer performance and limitations.</p>
+
+ <p><a id="lexer.Lexer.Basics"></a></p>
+
+ <h3>Lexer Basics</h3>
+
+ <p>The <em>lexlua/</em> directory contains all lexers, including your new one. Before
+ attempting to write one from scratch though, first determine if your
+ programming language is similar to any of the 100+ languages supported. If
+ so, you may be able to copy and modify that lexer, saving some time and
+ effort. The filename of your lexer should be the name of your programming
+ language in lower case followed by a <em>.lua</em> extension. For example, a new Lua
+ lexer has the name <em>lua.lua</em>.</p>
+
+ <p>Note: Try to refrain from using one-character language names like "c", "d",
+ or "r". For example, Lua lexers for those languages are named "ansi_c", "dmd", and "rstats",
+ respectively.</p>
+
+ <p><a id="lexer.New.Lexer.Template"></a></p>
+
+ <h4>New Lexer Template</h4>
+
+ <p>There is a <em>lexlua/template.txt</em> file that contains a simple template for a
+ new lexer. Feel free to use it, replacing the '?'s with the name of your
+ lexer. Consider this snippet from the template:</p>
+
+ <pre><code>
+ -- ? LPeg lexer.
+
+ local lexer = require('lexer')
+ local token, word_match = lexer.token, lexer.word_match
+ local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+ local lex = lexer.new('?')
+
+ -- Whitespace.
+ local ws = token(lexer.WHITESPACE, lexer.space^1)
+ lex:add_rule('whitespace', ws)
+
+ [...]
+
+ return lex
+ </code></pre>
+
+ <p>The first 3 lines of code simply define often used convenience variables. The
+ fourth and last lines <a href="#lexer.new">define</a> and return the lexer object
+ Scintilla uses; they are very important and must be part of every lexer. The
+ fifth line defines something called a "token", an essential building block of
+ lexers. You will learn about tokens shortly. The sixth line defines a lexer
+ grammar rule, which you will learn about later, as well as token styles. (Be
+ aware that it is common practice to combine these two lines for short rules.)
+ Note, however, the <code>local</code> prefix in front of variables, which is needed
+    so as not to affect Lua's global environment. All in all, this is a minimal,
+ working lexer that you can build on.</p>
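+
+    <p>To make the template concrete, here is a sketch of it filled in for a
+    hypothetical language named "mylang". The keyword list and the '#' comment
+    prefix below are purely illustrative:</p>
+
+    <pre><code>
+      -- mylang LPeg lexer (illustrative sketch).
+
+      local lexer = require('lexer')
+      local token, word_match = lexer.token, lexer.word_match
+
+      local lex = lexer.new('mylang')
+
+      -- Whitespace.
+      lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+      -- Keywords (rule order matters: keywords must come before identifiers).
+      lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+        if else end function return
+      ]]))
+
+      -- Identifiers and line comments.
+      lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+      lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+      return lex
+    </code></pre>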
+
+ <p><a id="lexer.Tokens"></a></p>
+
+ <h4>Tokens</h4>
+
+ <p>Take a moment to think about your programming language's structure. What kind
+ of key elements does it have? In the template shown earlier, one predefined
+ element all languages have is whitespace. Your language probably also has
+ elements like comments, strings, and keywords. Lexers refer to these elements
+ as "tokens". Tokens are the fundamental "building blocks" of lexers. Lexers
+ break down source code into tokens for coloring, which results in the syntax
+ highlighting familiar to you. It is up to you how specific your lexer is when
+ it comes to tokens. Perhaps only distinguishing between keywords and
+ identifiers is necessary, or maybe recognizing constants and built-in
+ functions, methods, or libraries is desirable. The Lua lexer, for example,
+ defines 11 tokens: whitespace, keywords, built-in functions, constants,
+ built-in libraries, identifiers, strings, comments, numbers, labels, and
+ operators. Even though constants, built-in functions, and built-in libraries
+ are subsets of identifiers, Lua programmers find it helpful for the lexer to
+ distinguish between them all. It is perfectly acceptable to just recognize
+ keywords and identifiers.</p>
+
+ <p>In a lexer, tokens consist of a token name and an LPeg pattern that matches a
+ sequence of characters recognized as an instance of that token. Create tokens
+ using the <a href="#lexer.token"><code>lexer.token()</code></a> function. Let us examine the "whitespace" token
+ defined in the template shown earlier:</p>
+
+ <pre><code>
+ local ws = token(lexer.WHITESPACE, lexer.space^1)
+ </code></pre>
+
+ <p>At first glance, the first argument does not appear to be a string name and
+ the second argument does not appear to be an LPeg pattern. Perhaps you
+ expected something like:</p>
+
+ <pre><code>
+ local ws = token('whitespace', S('\t\v\f\n\r ')^1)
+ </code></pre>
+
+ <p>The <code>lexer</code> module actually provides a convenient list of common token names
+ and common LPeg patterns for you to use. Token names include
+ <a href="#lexer.DEFAULT"><code>lexer.DEFAULT</code></a>, <a href="#lexer.WHITESPACE"><code>lexer.WHITESPACE</code></a>, <a href="#lexer.COMMENT"><code>lexer.COMMENT</code></a>,
+ <a href="#lexer.STRING"><code>lexer.STRING</code></a>, <a href="#lexer.NUMBER"><code>lexer.NUMBER</code></a>, <a href="#lexer.KEYWORD"><code>lexer.KEYWORD</code></a>,
+ <a href="#lexer.IDENTIFIER"><code>lexer.IDENTIFIER</code></a>, <a href="#lexer.OPERATOR"><code>lexer.OPERATOR</code></a>, <a href="#lexer.ERROR"><code>lexer.ERROR</code></a>,
+ <a href="#lexer.PREPROCESSOR"><code>lexer.PREPROCESSOR</code></a>, <a href="#lexer.CONSTANT"><code>lexer.CONSTANT</code></a>, <a href="#lexer.VARIABLE"><code>lexer.VARIABLE</code></a>,
+ <a href="#lexer.FUNCTION"><code>lexer.FUNCTION</code></a>, <a href="#lexer.CLASS"><code>lexer.CLASS</code></a>, <a href="#lexer.TYPE"><code>lexer.TYPE</code></a>, <a href="#lexer.LABEL"><code>lexer.LABEL</code></a>,
+ <a href="#lexer.REGEX"><code>lexer.REGEX</code></a>, and <a href="#lexer.EMBEDDED"><code>lexer.EMBEDDED</code></a>. Patterns include
+ <a href="#lexer.any"><code>lexer.any</code></a>, <a href="#lexer.ascii"><code>lexer.ascii</code></a>, <a href="#lexer.extend"><code>lexer.extend</code></a>, <a href="#lexer.alpha"><code>lexer.alpha</code></a>,
+ <a href="#lexer.digit"><code>lexer.digit</code></a>, <a href="#lexer.alnum"><code>lexer.alnum</code></a>, <a href="#lexer.lower"><code>lexer.lower</code></a>, <a href="#lexer.upper"><code>lexer.upper</code></a>,
+ <a href="#lexer.xdigit"><code>lexer.xdigit</code></a>, <a href="#lexer.cntrl"><code>lexer.cntrl</code></a>, <a href="#lexer.graph"><code>lexer.graph</code></a>, <a href="#lexer.print"><code>lexer.print</code></a>,
+ <a href="#lexer.punct"><code>lexer.punct</code></a>, <a href="#lexer.space"><code>lexer.space</code></a>, <a href="#lexer.newline"><code>lexer.newline</code></a>,
+ <a href="#lexer.nonnewline"><code>lexer.nonnewline</code></a>, <a href="#lexer.nonnewline_esc"><code>lexer.nonnewline_esc</code></a>, <a href="#lexer.dec_num"><code>lexer.dec_num</code></a>,
+ <a href="#lexer.hex_num"><code>lexer.hex_num</code></a>, <a href="#lexer.oct_num"><code>lexer.oct_num</code></a>, <a href="#lexer.integer"><code>lexer.integer</code></a>,
+ <a href="#lexer.float"><code>lexer.float</code></a>, and <a href="#lexer.word"><code>lexer.word</code></a>. You may use your own token names if
+ none of the above fit your language, but an advantage to using predefined
+ token names is that your lexer's tokens will inherit the universal syntax
+ highlighting color theme used by your text editor.</p>
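+
+    <p>For instance, the following tokens (a brief, illustrative sketch) are built
+    entirely from the predefined names and patterns listed above, and so will
+    automatically pick up the universal color theme:</p>
+
+    <pre><code>
+      local identifier = token(lexer.IDENTIFIER, lexer.word)
+      local number = token(lexer.NUMBER, lexer.float + lexer.integer)
+      local preproc = token(lexer.PREPROCESSOR, '#' * lexer.word)
+    </code></pre>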
+
+ <p><a id="lexer.Example.Tokens"></a></p>
+
+ <h5>Example Tokens</h5>
+
+ <p>So, how might you define other tokens like keywords, comments, and strings?
+ Here are some examples.</p>
+
+ <p><strong>Keywords</strong></p>
+
+ <p>Instead of matching <em>n</em> keywords with <em>n</em> <code>P('keyword_</code><em><code>n</code></em><code>')</code> ordered
+ choices, use another convenience function: <a href="#lexer.word_match"><code>lexer.word_match()</code></a>. It is
+ much easier and more efficient to write word matches like:</p>
+
+ <pre><code>
+ local keyword = token(lexer.KEYWORD, lexer.word_match[[
+ keyword_1 keyword_2 ... keyword_n
+ ]])
+
+ local case_insensitive_keyword = token(lexer.KEYWORD, lexer.word_match([[
+ KEYWORD_1 keyword_2 ... KEYword_n
+ ]], true))
+
+ local hyphened_keyword = token(lexer.KEYWORD, lexer.word_match[[
+ keyword-1 keyword-2 ... keyword-n
+ ]])
+ </code></pre>
+
+ <p>In order to more easily separate or categorize keyword sets, you can use Lua
+ line comments within keyword strings. Such comments will be ignored. For
+ example:</p>
+
+ <pre><code>
+ local keyword = token(lexer.KEYWORD, lexer.word_match[[
+ -- Version 1 keywords.
+ keyword_11, keyword_12 ... keyword_1n
+ -- Version 2 keywords.
+ keyword_21, keyword_22 ... keyword_2n
+ ...
+ -- Version N keywords.
+ keyword_m1, keyword_m2 ... keyword_mn
+ ]])
+ </code></pre>
+
+ <p><strong>Comments</strong></p>
+
+    <p>Line-style comments with one or more prefix characters are easy to express with LPeg:</p>
+
+ <pre><code>
+ local shell_comment = token(lexer.COMMENT, '#' * lexer.nonnewline^0)
+ local c_line_comment = token(lexer.COMMENT,
+ '//' * lexer.nonnewline_esc^0)
+ </code></pre>
+
+ <p>The comments above start with a '#' or "//" and go to the end of the line.
+ The second comment recognizes the next line also as a comment if the current
+ line ends with a '\' escape character.</p>
+
+ <p>C-style "block" comments with a start and end delimiter are also easy to
+ express:</p>
+
+ <pre><code>
+ local c_comment = token(lexer.COMMENT,
+ '/*' * (lexer.any - '*/')^0 * P('*/')^-1)
+ </code></pre>
+
+ <p>This comment starts with a "/*" sequence and contains anything up to and
+ including an ending "*/" sequence. The ending "*/" is optional so the lexer
+ can recognize unfinished comments as comments and highlight them properly.</p>
+
+ <p><strong>Strings</strong></p>
+
+ <p>It is tempting to think that a string is not much different from the block
+ comment shown above in that both have start and end delimiters:</p>
+
+ <pre><code>
+ local dq_str = '"' * (lexer.any - '"')^0 * P('"')^-1
+ local sq_str = "'" * (lexer.any - "'")^0 * P("'")^-1
+ local simple_string = token(lexer.STRING, dq_str + sq_str)
+ </code></pre>
+
+ <p>However, most programming languages allow escape sequences in strings such
+ that a sequence like "\&quot;" in a double-quoted string indicates that the
+ '&quot;' is not the end of the string. The above token incorrectly matches
+ such a string. Instead, use the <a href="#lexer.delimited_range"><code>lexer.delimited_range()</code></a> convenience
+ function.</p>
+
+ <pre><code>
+ local dq_str = lexer.delimited_range('"')
+ local sq_str = lexer.delimited_range("'")
+ local string = token(lexer.STRING, dq_str + sq_str)
+ </code></pre>
+
+ <p>In this case, the lexer treats '\' as an escape character in a string
+ sequence.</p>
+
+ <p><strong>Numbers</strong></p>
+
+ <p>Most programming languages have the same format for integer and float tokens,
+ so it might be as simple as using a couple of predefined LPeg patterns:</p>
+
+ <pre><code>
+ local number = token(lexer.NUMBER, lexer.float + lexer.integer)
+ </code></pre>
+
+ <p>However, some languages allow postfix characters on integers.</p>
+
+ <pre><code>
+ local integer = P('-')^-1 * (lexer.dec_num * S('lL')^-1)
+ local number = token(lexer.NUMBER, lexer.float + lexer.hex_num + integer)
+ </code></pre>
+
+ <p>Your language may need other tweaks, but it is up to you how fine-grained you
+ want your highlighting to be. After all, you are not writing a compiler or
+ interpreter!</p>
+
+ <p><a id="lexer.Rules"></a></p>
+
+ <h4>Rules</h4>
+
+ <p>Programming languages have grammars, which specify valid token structure. For
+ example, comments usually cannot appear within a string. Grammars consist of
+ rules, which are simply combinations of tokens. Recall from the lexer
+ template the <a href="#lexer.add_rule"><code>lexer.add_rule()</code></a> call, which adds a rule to the lexer's
+ grammar:</p>
+
+ <pre><code>
+ lex:add_rule('whitespace', ws)
+ </code></pre>
+
+ <p>Each rule has an associated name, but rule names are completely arbitrary and
+ serve only to identify and distinguish between different rules. Rule order is
+ important: if text does not match the first rule added to the grammar, the
+ lexer tries to match the second rule added, and so on. Right now this lexer
+ simply matches whitespace tokens under a rule named "whitespace".</p>
+
+ <p>To illustrate the importance of rule order, here is an example of a
+ simplified Lua lexer:</p>
+
+ <pre><code>
+ lex:add_rule('whitespace', token(lexer.WHITESPACE, ...))
+ lex:add_rule('keyword', token(lexer.KEYWORD, ...))
+ lex:add_rule('identifier', token(lexer.IDENTIFIER, ...))
+ lex:add_rule('string', token(lexer.STRING, ...))
+ lex:add_rule('comment', token(lexer.COMMENT, ...))
+ lex:add_rule('number', token(lexer.NUMBER, ...))
+ lex:add_rule('label', token(lexer.LABEL, ...))
+ lex:add_rule('operator', token(lexer.OPERATOR, ...))
+ </code></pre>
+
+ <p>Note how identifiers come after keywords. In Lua, as with most programming
+ languages, the characters allowed in keywords and identifiers are in the same
+ set (alphanumerics plus underscores). If the lexer added the "identifier"
+ rule before the "keyword" rule, all keywords would match identifiers and thus
+ incorrectly highlight as identifiers instead of keywords. The same idea
+ applies to function, constant, etc. tokens that you may want to distinguish
+ between: their rules should come before identifiers.</p>
+
+ <p>So what about text that does not match any rules? For example in Lua, the '!'
+ character is meaningless outside a string or comment. Normally the lexer
+ skips over such text. If instead you want to highlight these "syntax errors",
+ add an additional end rule:</p>
+
+ <pre><code>
+ lex:add_rule('whitespace', ws)
+ ...
+ lex:add_rule('error', token(lexer.ERROR, lexer.any))
+ </code></pre>
+
+ <p>This identifies and highlights any character not matched by an existing
+ rule as a <code>lexer.ERROR</code> token.</p>
+
+ <p>Even though the rules defined in the examples above contain a single token,
+ rules may consist of multiple tokens. For example, a rule for an HTML tag
+ could consist of a tag token followed by an arbitrary number of attribute
+ tokens, allowing the lexer to highlight all tokens separately. That rule
+ might look something like this:</p>
+
+ <pre><code>
+ lex:add_rule('tag', tag_start * (ws * attributes)^0 * tag_end^-1)
+ </code></pre>
+
+ <p>Note however that lexers with complex rules like these are more prone to lose
+ track of their state, especially if they span multiple lines.</p>
+
+ <p><a id="lexer.Summary"></a></p>
+
+ <h4>Summary</h4>
+
+ <p>Lexers primarily consist of tokens and grammar rules. At your disposal are a
+ number of convenience patterns and functions for rapidly creating a lexer. If
+ you choose to use predefined token names for your tokens, you do not have to
+ define how the lexer highlights them. The tokens will inherit the default
+ syntax highlighting color theme your editor uses.</p>
+
+ <p><a id="lexer.Advanced.Techniques"></a></p>
+
+ <h3>Advanced Techniques</h3>
+
+ <p><a id="lexer.Styles.and.Styling"></a></p>
+
+ <h4>Styles and Styling</h4>
+
+ <p>The most basic form of syntax highlighting is assigning different colors to
+ different tokens. Instead of highlighting with just colors, Scintilla allows
+    for richer highlighting, or "styling", with different fonts, font sizes,
+ font attributes, and foreground and background colors, just to name a few.
+ The unit of this rich highlighting is called a "style". Styles are simply
+ strings of comma-separated property settings. By default, lexers associate
+ predefined token names like <code>lexer.WHITESPACE</code>, <code>lexer.COMMENT</code>,
+ <code>lexer.STRING</code>, etc. with particular styles as part of a universal color
+ theme. These predefined styles include <a href="#lexer.STYLE_CLASS"><code>lexer.STYLE_CLASS</code></a>,
+ <a href="#lexer.STYLE_COMMENT"><code>lexer.STYLE_COMMENT</code></a>, <a href="#lexer.STYLE_CONSTANT"><code>lexer.STYLE_CONSTANT</code></a>,
+ <a href="#lexer.STYLE_ERROR"><code>lexer.STYLE_ERROR</code></a>, <a href="#lexer.STYLE_EMBEDDED"><code>lexer.STYLE_EMBEDDED</code></a>,
+ <a href="#lexer.STYLE_FUNCTION"><code>lexer.STYLE_FUNCTION</code></a>, <a href="#lexer.STYLE_IDENTIFIER"><code>lexer.STYLE_IDENTIFIER</code></a>,
+ <a href="#lexer.STYLE_KEYWORD"><code>lexer.STYLE_KEYWORD</code></a>, <a href="#lexer.STYLE_LABEL"><code>lexer.STYLE_LABEL</code></a>, <a href="#lexer.STYLE_NUMBER"><code>lexer.STYLE_NUMBER</code></a>,
+ <a href="#lexer.STYLE_OPERATOR"><code>lexer.STYLE_OPERATOR</code></a>, <a href="#lexer.STYLE_PREPROCESSOR"><code>lexer.STYLE_PREPROCESSOR</code></a>,
+ <a href="#lexer.STYLE_REGEX"><code>lexer.STYLE_REGEX</code></a>, <a href="#lexer.STYLE_STRING"><code>lexer.STYLE_STRING</code></a>, <a href="#lexer.STYLE_TYPE"><code>lexer.STYLE_TYPE</code></a>,
+ <a href="#lexer.STYLE_VARIABLE"><code>lexer.STYLE_VARIABLE</code></a>, and <a href="#lexer.STYLE_WHITESPACE"><code>lexer.STYLE_WHITESPACE</code></a>. Like with
+ predefined token names and LPeg patterns, you may define your own styles. At
+ their core, styles are just strings, so you may create new ones and/or modify
+ existing ones. Each style consists of the following comma-separated settings:</p>
+
+ <table class="standard">
+ <thead>
+ <tr>
+ <th>Setting </th>
+ <th> Description</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr>
+ <td>font:<em>name</em> </td>
+ <td> The name of the font the style uses.</td>
+ </tr>
+ <tr>
+ <td>size:<em>int</em> </td>
+ <td> The size of the font the style uses.</td>
+ </tr>
+ <tr>
+ <td>[not]bold </td>
+ <td> Whether or not the font face is bold.</td>
+ </tr>
+ <tr>
+ <td>weight:<em>int</em> </td>
+ <td> The weight or boldness of a font, between 1 and 999.</td>
+ </tr>
+ <tr>
+ <td>[not]italics </td>
+ <td> Whether or not the font face is italic.</td>
+ </tr>
+ <tr>
+ <td>[not]underlined</td>
+ <td> Whether or not the font face is underlined.</td>
+ </tr>
+ <tr>
+ <td>fore:<em>color</em> </td>
+ <td> The foreground color of the font face.</td>
+ </tr>
+ <tr>
+ <td>back:<em>color</em> </td>
+ <td> The background color of the font face.</td>
+ </tr>
+ <tr>
+ <td>[not]eolfilled </td>
+ <td> Does the background color extend to the end of the line?</td>
+ </tr>
+ <tr>
+ <td>case:<em>char</em> </td>
+ <td> The case of the font ('u': upper, 'l': lower, 'm': normal).</td>
+ </tr>
+ <tr>
+ <td>[not]visible </td>
+ <td> Whether or not the text is visible.</td>
+ </tr>
+ <tr>
+ <td>[not]changeable</td>
+ <td> Whether the text is changeable or read-only.</td>
+ </tr>
+ </tbody>
+ </table>
+
+
+ <p>Specify font colors in either "#RRGGBB" format, "0xBBGGRR" format, or the
+ decimal equivalent of the latter. As with token names, LPeg patterns, and
+ styles, there is a set of predefined color names, but they vary depending on
+ the current color theme in use. Therefore, it is generally not a good idea to
+ manually define colors within styles in your lexer since they might not fit
+ into a user's chosen color theme. Try to refrain from even using predefined
+ colors in a style because that color may be theme-specific. Instead, the best
+ practice is to either use predefined styles or derive new color-agnostic
+ styles from predefined ones. For example, Lua "longstring" tokens use the
+ existing <code>lexer.STYLE_STRING</code> style instead of defining a new one.</p>
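+
+    <p>Reusing an existing style for a custom token is a one-liner. The following
+    sketch is roughly what the Lua lexer does for its "longstring" token (the
+    <code>lexer.add_style()</code> function is described in the Token Styles
+    section below):</p>
+
+    <pre><code>
+      lex:add_style('longstring', lexer.STYLE_STRING)
+    </code></pre>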
+
+ <p><a id="lexer.Example.Styles"></a></p>
+
+ <h5>Example Styles</h5>
+
+ <p>Defining styles is pretty straightforward. An empty style that inherits the
+ default theme settings is simply an empty string:</p>
+
+ <pre><code>
+ local style_nothing = ''
+ </code></pre>
+
+ <p>A similar style but with a bold font face looks like this:</p>
+
+ <pre><code>
+ local style_bold = 'bold'
+ </code></pre>
+
+ <p>If you want the same style, but also with an italic font face, define the new
+ style in terms of the old one:</p>
+
+ <pre><code>
+ local style_bold_italic = style_bold..',italics'
+ </code></pre>
+
+ <p>This allows you to derive new styles from predefined ones without having to
+ rewrite them. This operation leaves the old style unchanged. Thus if you
+ had a "static variable" token whose style you wanted to base off of
+ <code>lexer.STYLE_VARIABLE</code>, it would probably look like:</p>
+
+ <pre><code>
+ local style_static_var = lexer.STYLE_VARIABLE..',italics'
+ </code></pre>
+
+ <p>The color theme files in the <em>lexlua/themes/</em> folder give more examples of
+ style definitions.</p>
+
+ <p><a id="lexer.Token.Styles"></a></p>
+
+ <h4>Token Styles</h4>
+
+ <p>Lexers use the <a href="#lexer.add_style"><code>lexer.add_style()</code></a> function to assign styles to
+    particular tokens. Recall the token definition from the lexer template:</p>
+
+ <pre><code>
+ local ws = token(lexer.WHITESPACE, lexer.space^1)
+ lex:add_rule('whitespace', ws)
+ </code></pre>
+
+ <p>Why is a style not assigned to the <code>lexer.WHITESPACE</code> token? As mentioned
+ earlier, lexers automatically associate tokens that use predefined token
+ names with a particular style. Only tokens with custom token names need
+ manual style associations. As an example, consider a custom whitespace token:</p>
+
+ <pre><code>
+ local ws = token('custom_whitespace', lexer.space^1)
+ </code></pre>
+
+ <p>Assigning a style to this token looks like:</p>
+
+ <pre><code>
+ lex:add_style('custom_whitespace', lexer.STYLE_WHITESPACE)
+ </code></pre>
+
+ <p>Do not confuse token names with rule names. They are completely different
+ entities. In the example above, the lexer associates the "custom_whitespace"
+ token with the existing style for <code>lexer.WHITESPACE</code> tokens. If instead you
+ prefer to color the background of whitespace a shade of grey, it might look
+ like:</p>
+
+ <pre><code>
+ local custom_style = lexer.STYLE_WHITESPACE..',back:$(color.grey)'
+ lex:add_style('custom_whitespace', custom_style)
+ </code></pre>
+
+    <p>Notice that the lexer performs Scintilla-style "$()" property expansion.
+ You may also use "%()". Remember to refrain from assigning specific colors in
+ styles, but in this case, all user color themes probably define the
+ "color.grey" property.</p>
+
+ <p><a id="lexer.Line.Lexers"></a></p>
+
+ <h4>Line Lexers</h4>
+
+ <p>By default, lexers match the arbitrary chunks of text passed to them by
+ Scintilla. These chunks may be a full document, only the visible part of a
+ document, or even just portions of lines. Some lexers need to match whole
+ lines. For example, a lexer for the output of a file "diff" needs to know if
+ the line started with a '+' or '-' and then style the entire line
+ accordingly. To indicate that your lexer matches by line, create the lexer
+ with an extra parameter:</p>
+
+ <pre><code>
+ local lex = lexer.new('?', {lex_by_line = true})
+ </code></pre>
+
+ <p>Now the input text for the lexer is a single line at a time. Keep in mind
+ that line lexers do not have the ability to look ahead at subsequent lines.</p>
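+
+    <p>As a sketch, a hypothetical line lexer for "diff"-like output might look
+    something like the following. The custom token names and the predefined
+    styles they borrow are illustrative only:</p>
+
+    <pre><code>
+      local lex = lexer.new('mydiff', {lex_by_line = true})
+
+      -- Style whole lines according to their first character.
+      lex:add_rule('addition', token('addition', '+' * lexer.nonnewline^0))
+      lex:add_rule('deletion', token('deletion', '-' * lexer.nonnewline^0))
+      lex:add_rule('any_line', token(lexer.DEFAULT, lexer.nonnewline^1))
+
+      -- Custom token names need explicit style associations.
+      lex:add_style('addition', lexer.STYLE_CONSTANT)
+      lex:add_style('deletion', lexer.STYLE_ERROR)
+
+      return lex
+    </code></pre>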
+
+ <p><a id="lexer.Embedded.Lexers"></a></p>
+
+ <h4>Embedded Lexers</h4>
+
+ <p>Lexers embed within one another very easily, requiring minimal effort. In the
+ following sections, the lexer being embedded is called the "child" lexer and
+ the lexer a child is being embedded in is called the "parent". For example,
+ consider an HTML lexer and a CSS lexer. Either lexer stands alone for styling
+ their respective HTML and CSS files. However, CSS can be embedded inside
+ HTML. In this specific case, the CSS lexer is the "child" lexer with the HTML
+ lexer being the "parent". Now consider an HTML lexer and a PHP lexer. This
+ sounds a lot like the case with CSS, but there is a subtle difference: PHP
+ <em>embeds itself into</em> HTML while CSS is <em>embedded in</em> HTML. This fundamental
+ difference results in two types of embedded lexers: a parent lexer that
+ embeds other child lexers in it (like HTML embedding CSS), and a child lexer
+ that embeds itself into a parent lexer (like PHP embedding itself in HTML).</p>
+
+ <p><a id="lexer.Parent.Lexer"></a></p>
+
+ <h5>Parent Lexer</h5>
+
+ <p>Before embedding a child lexer into a parent lexer, the parent lexer needs to
+ load the child lexer. This is done with the <a href="#lexer.load"><code>lexer.load()</code></a> function. For
+ example, loading the CSS lexer within the HTML lexer looks like:</p>
+
+ <pre><code>
+ local css = lexer.load('css')
+ </code></pre>
+
+ <p>The next part of the embedding process is telling the parent lexer when to
+ switch over to the child lexer and when to switch back. The lexer refers to
+ these indications as the "start rule" and "end rule", respectively, which are
+ just LPeg patterns. Continuing with the HTML/CSS example, the transition from
+ HTML to CSS is when the lexer encounters a "style" tag with a "type"
+ attribute whose value is "text/css":</p>
+
+ <pre><code>
+ local css_tag = P('&lt;style') * P(function(input, index)
+ if input:find('^[^&gt;]+type="text/css"', index) then
+ return index
+ end
+ end)
+ </code></pre>
+
+ <p>This pattern looks for the beginning of a "style" tag and searches its
+ attribute list for the text "<code>type="text/css"</code>". (In this simplified example,
+ the Lua pattern does not allow for whitespace around the '=', nor does it
+ allow single-quoted attribute values.) If there is a match, the
+ functional pattern returns a value instead of <code>nil</code>. In this case, the value
+ returned does not matter because we ultimately want to style the "style" tag
+ as an HTML tag, so the actual start rule looks like this:</p>
+
+ <pre><code>
+ local css_start_rule = #css_tag * tag
+ </code></pre>
+
+ <p>Now that the parent knows when to switch to the child, it needs to know when
+ to switch back. In the case of HTML/CSS, the switch back occurs when the
+ lexer encounters an ending "style" tag, though the lexer should still style
+ the tag as an HTML tag:</p>
+
+ <pre><code>
+ local css_end_rule = #P('&lt;/style&gt;') * tag
+ </code></pre>
+
+ <p>Once the parent loads the child lexer and defines the child's start and end
+ rules, it embeds the child with the <a href="#lexer.embed"><code>lexer.embed()</code></a> function:</p>
+
+ <pre><code>
+ lex:embed(css, css_start_rule, css_end_rule)
+ </code></pre>
+
+ <p><a id="lexer.Child.Lexer"></a></p>
+
+ <h5>Child Lexer</h5>
+
+ <p>The process for instructing a child lexer to embed itself into a parent is
+ very similar to embedding a child into a parent: first, load the parent lexer
+ into the child lexer with the <a href="#lexer.load"><code>lexer.load()</code></a> function and then create
+ start and end rules for the child lexer. However, in this case, call
+ <a href="#lexer.embed"><code>lexer.embed()</code></a> with switched arguments. For example, in the PHP lexer:</p>
+
+ <pre><code>
+ local html = lexer.load('html')
+ local php_start_rule = token('php_tag', '&lt;?php ')
+ local php_end_rule = token('php_tag', '?&gt;')
+ lex:add_style('php_tag', lexer.STYLE_EMBEDDED)
+ html:embed(lex, php_start_rule, php_end_rule)
+ </code></pre>
+
+ <p><a id="lexer.Lexers.with.Complex.State"></a></p>
+
+ <h4>Lexers with Complex State</h4>
+
+ <p>The vast majority of lexers are not stateful and can operate on any chunk of
+ text in a document. However, there may be rare cases where a lexer does need
+ to keep track of some sort of persistent state. Rather than using <code>lpeg.P</code>
+ function patterns that set state variables, it is recommended to make use of
+ Scintilla's built-in, per-line state integers via <a href="#lexer.line_state"><code>lexer.line_state</code></a>. It
+ was designed to accommodate up to 32 bit flags for tracking state.
+ <a href="#lexer.line_from_position"><code>lexer.line_from_position()</code></a> will return the line for any position given
+ to an <code>lpeg.P</code> function pattern. (Any positions derived from that position
+ argument will also work.)</p>
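+
+ <p>As a minimal, hedged sketch (the <code>IN_BLOCK</code> flag and the rule such a pattern
+ would attach to are purely illustrative), a function pattern can record state
+ in <code>lexer.line_state</code> instead of in a Lua variable:</p>
+
+ <pre><code>
+ local IN_BLOCK = 1 -- illustrative bit flag
+
+ -- Mark the line containing the current match position.
+ local mark_line = lpeg.P(function(input, index)
+   local line = lexer.line_from_position(index)
+   lexer.line_state[line] = IN_BLOCK
+   return index -- match without consuming any input
+ end)
+ </code></pre>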
+
+ <p>Writing stateful lexers is beyond the scope of this document.</p>
+
+ <p><a id="lexer.Code.Folding"></a></p>
+
+ <h3>Code Folding</h3>
+
+ <p>When reading source code, it is occasionally helpful to temporarily hide
+ blocks of code like functions, classes, comments, etc. This is the concept of
+ "folding". In many Scintilla-based editors, such as Textadept, little indicators
+ in the editor margins appear next to code that can be folded at places called
+ "fold points". When the user clicks an indicator, the editor hides the code
+ associated with the indicator until the user clicks the indicator again. The
+ lexer specifies these fold points and what code exactly to fold.</p>
+
+ <p>The fold points for most languages occur on keywords or character sequences.
+ Examples of fold keywords are "if" and "end" in Lua and examples of fold
+ character sequences are '{', '}', "/*", and "*/" in C for code block and
+ comment delimiters, respectively. However, these fold points cannot occur
+ just anywhere. For example, lexers should not recognize fold keywords that
+ appear within strings or comments. The <a href="#lexer.add_fold_point"><code>lexer.add_fold_point()</code></a> function
+ allows you to conveniently define fold points with such granularity. For
+ example, consider C:</p>
+
+ <pre><code>
+ lex:add_fold_point(lexer.OPERATOR, '{', '}')
+ lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+ </code></pre>
+
+ <p>The first call states that any '{' or '}' that the lexer recognizes as
+ a <code>lexer.OPERATOR</code> token is a fold point. Likewise, the second call
+ states that any "/*" or "*/" that the lexer recognizes as part of a
+ <code>lexer.COMMENT</code> token is a fold point. The lexer does not consider any
+ occurrences of these characters outside their defined tokens (such as in a
+ string) as fold points. How do you specify fold keywords? Here is an example
+ for Lua:</p>
+
+ <pre><code>
+ lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
+ lex:add_fold_point(lexer.KEYWORD, 'do', 'end')
+ lex:add_fold_point(lexer.KEYWORD, 'function', 'end')
+ lex:add_fold_point(lexer.KEYWORD, 'repeat', 'until')
+ </code></pre>
+
+ <p>If your lexer has case-insensitive keywords as fold points, simply add a
+ <code>case_insensitive_fold_points = true</code> option to <a href="#lexer.new"><code>lexer.new()</code></a>, and
+ specify keywords in lower case.</p>
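+
+ <p>For example, a hypothetical language with case-insensitive "begin" and "end"
+ keywords might use:</p>
+
+ <pre><code>
+ local lex = lexer.new('?', {case_insensitive_fold_points = true})
+ lex:add_fold_point(lexer.KEYWORD, 'begin', 'end')
+ </code></pre>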
+
+ <p>If your lexer needs to do some additional processing in order to determine if
+ a token is a fold point, pass a function that returns an integer to
+ <code>lex:add_fold_point()</code>. Returning <code>1</code> indicates the token is a beginning fold
+ point and returning <code>-1</code> indicates the token is an ending fold point.
+ Returning <code>0</code> indicates the token is not a fold point. For example:</p>
+
+ <pre><code>
+ local function fold_strange_token(text, pos, line, s, symbol)
+ if ... then
+ return 1 -- beginning fold point
+ elseif ... then
+ return -1 -- ending fold point
+ end
+ return 0
+ end
+
+ lex:add_fold_point('strange_token', '|', fold_strange_token)
+ </code></pre>
+
+ <p>Any time the lexer encounters a '|' that is a "strange_token", it calls the
+ <code>fold_strange_token</code> function to determine if '|' is a fold point. The lexer
+ calls these functions with the following arguments: the text to identify fold
+ points in, the beginning position of the current line in the text to fold,
+ the current line's text, the position in the current line the fold point text
+ starts at, and the fold point text itself.</p>
+
+ <p><a id="lexer.Fold.by.Indentation"></a></p>
+
+ <h4>Fold by Indentation</h4>
+
+ <p>Some languages have significant whitespace and/or no delimiters that indicate
+ fold points. If your lexer falls into this category and you would like to
+ mark fold points based on changes in indentation, create the lexer with a
+ <code>fold_by_indentation = true</code> option:</p>
+
+ <pre><code>
+ local lex = lexer.new('?', {fold_by_indentation = true})
+ </code></pre>
+
+ <p><a id="lexer.Using.Lexers"></a></p>
+
+ <h3>Using Lexers</h3>
+
+ <p><a id="lexer.Textadept"></a></p>
+
+ <h4>Textadept</h4>
+
+ <p>Put your lexer in your <em>~/.textadept/lexers/</em> directory so you do not
+ overwrite it when upgrading Textadept. Also, lexers in this directory
+ override default lexers. Thus, Textadept loads a user <em>lua</em> lexer instead of
+ the default <em>lua</em> lexer. This is convenient for tweaking a default lexer to
+ your liking. Then add a <a href="https://foicica.com/textadept/api.html#textadept.file_types">file type</a> for your lexer if necessary.</p>
+
+ <p><a id="lexer.Migrating.Legacy.Lexers"></a></p>
+
+ <h3>Migrating Legacy Lexers</h3>
+
+ <p>Legacy lexers are of the form:</p>
+
+ <pre><code>
+ local l = require('lexer')
+ local token, word_match = l.token, l.word_match
+ local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+ local M = {_NAME = '?'}
+
+ [... token and pattern definitions ...]
+
+ M._rules = {
+ {'rule', pattern},
+ [...]
+ }
+
+ M._tokenstyles = {
+ ['token'] = 'style',
+ [...]
+ }
+
+ M._foldsymbols = {
+ _patterns = {...},
+ ['token'] = {['start'] = 1, ['end'] = -1},
+ [...]
+ }
+
+ return M
+ </code></pre>
+
+ <p>While such legacy lexers will be handled just fine without any
+ changes, it is recommended that you migrate yours. The migration process is
+ fairly straightforward:</p>
+
+ <ol>
+ <li>Replace all instances of <code>l</code> with <code>lexer</code>, as it's better practice and
+ results in less confusion.</li>
+ <li>Replace <code>local M = {_NAME = '?'}</code> with <code>local lex = lexer.new('?')</code>, where
+ <code>?</code> is the name of your legacy lexer. At the end of the lexer, change
+ <code>return M</code> to <code>return lex</code>.</li>
+ <li>Instead of defining rules towards the end of your lexer, define your rules
+ as you define your tokens and patterns using
+ <a href="#lexer.add_rule"><code>lex:add_rule()</code></a>.</li>
+ <li>Similarly, any custom token names should have their styles immediately
+ defined using <a href="#lexer.add_style"><code>lex:add_style()</code></a>.</li>
+ <li>Convert any table arguments passed to <a href="#lexer.word_match"><code>lexer.word_match()</code></a> to a
+ space-separated string of words.</li>
+ <li>Replace any calls to <code>lexer.embed(M, child, ...)</code> and
+ <code>lexer.embed(parent, M, ...)</code> with
+ <a href="#lexer.embed"><code>lex:embed</code></a><code>(child, ...)</code> and <code>parent:embed(lex, ...)</code>,
+ respectively.</li>
+ <li>Define fold points with simple calls to
+ <a href="#lexer.add_fold_point"><code>lex:add_fold_point()</code></a>. No need to mess with Lua
+ patterns anymore.</li>
+ <li>Any legacy lexer options such as <code>M._FOLDBYINDENTATION</code>, <code>M._LEXBYLINE</code>,
+ <code>M._lexer</code>, etc. should be added as table options to <a href="#lexer.new"><code>lexer.new()</code></a>.</li>
+ <li>Any external lexer rule fetching and/or modifications via <code>lexer._RULES</code>
+ should be changed to use <a href="#lexer.get_rule"><code>lexer.get_rule()</code></a> and
+ <a href="#lexer.modify_rule"><code>lexer.modify_rule()</code></a>.</li>
+ </ol>
+
+
+ <p>As an example, consider the following sample legacy lexer:</p>
+
+ <pre><code>
+ local l = require('lexer')
+ local token, word_match = l.token, l.word_match
+ local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+ local M = {_NAME = 'legacy'}
+
+ local ws = token(l.WHITESPACE, l.space^1)
+ local comment = token(l.COMMENT, '#' * l.nonnewline^0)
+ local string = token(l.STRING, l.delimited_range('"'))
+ local number = token(l.NUMBER, l.float + l.integer)
+ local keyword = token(l.KEYWORD, word_match{'foo', 'bar', 'baz'})
+ local custom = token('custom', P('quux'))
+ local identifier = token(l.IDENTIFIER, l.word)
+ local operator = token(l.OPERATOR, S('+-*/%^=&lt;&gt;,.()[]{}'))
+
+ M._rules = {
+ {'whitespace', ws},
+ {'keyword', keyword},
+ {'custom', custom},
+ {'identifier', identifier},
+ {'string', string},
+ {'comment', comment},
+ {'number', number},
+ {'operator', operator}
+ }
+
+ M._tokenstyles = {
+ ['custom'] = l.STYLE_KEYWORD..',bold'
+ }
+
+ M._foldsymbols = {
+ _patterns = {'[{}]'},
+ [l.OPERATOR] = {['{'] = 1, ['}'] = -1}
+ }
+
+ return M
+ </code></pre>
+
+ <p>Following the migration steps would yield:</p>
+
+ <pre><code>
+ local lexer = require('lexer')
+ local token, word_match = lexer.token, lexer.word_match
+ local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+ local lex = lexer.new('legacy')
+
+ lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+ lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[foo bar baz]]))
+ lex:add_rule('custom', token('custom', P('quux')))
+ lex:add_style('custom', lexer.STYLE_KEYWORD..',bold')
+ lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+ lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"')))
+ lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+ lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+ lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/%^=&lt;&gt;,.()[]{}')))
+
+ lex:add_fold_point(lexer.OPERATOR, '{', '}')
+
+ return lex
+ </code></pre>
+
+ <p><a id="lexer.Considerations"></a></p>
+
+ <h3>Considerations</h3>
+
+ <p><a id="lexer.Performance"></a></p>
+
+ <h4>Performance</h4>
+
+ <p>There might be some slight overhead when initializing a lexer, but loading a
+ file from disk into Scintilla is usually more expensive. On modern computer
+ systems, I see no difference in speed between Lua lexers and Scintilla's C++
+ ones. Optimize lexers for speed by re-arranging <code>lexer.add_rule()</code> calls so
+ that the most common rules match first. Do keep in mind that order matters
+ for similar rules.</p>
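+
+ <p>For instance, if comments and strings dominate a particular language, an
+ ordering along the following lines (purely illustrative, reusing the token
+ names from the earlier examples) may help:</p>
+
+ <pre><code>
+ lex:add_rule('whitespace', ws)
+ lex:add_rule('comment', comment)
+ lex:add_rule('string', string)
+ lex:add_rule('keyword', keyword)
+ lex:add_rule('identifier', identifier)
+ lex:add_rule('operator', operator)
+ </code></pre>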
+
+ <p>In some cases, folding may be far more expensive than lexing, particularly
+ in lexers with a lot of potential fold points. If your lexer is exhibiting
+ signs of slowness, try disabling folding in your text editor first. If that
+ speeds things up, you can try reducing the number of fold points you added,
+ overriding <code>lexer.fold()</code> with your own implementation, or simply eliminating
+ folding support from your lexer.</p>
+
+ <p><a id="lexer.Limitations"></a></p>
+
+ <h4>Limitations</h4>
+
+ <p>Embedded preprocessor languages like PHP cannot be embedded completely into
+ their parent languages because the parent's tokens do not support start and
+ end rules within them. This mostly goes unnoticed, but code like</p>
+
+ <pre><code>
+ &lt;div id="&lt;?php echo $id; ?&gt;"&gt;
+ </code></pre>
+
+ <p>will not style correctly.</p>
+
+ <p><a id="lexer.Troubleshooting"></a></p>
+
+ <h4>Troubleshooting</h4>
+
+ <p>Errors in lexers can be tricky to debug. Lexers print Lua errors to
+ <code>io.stderr</code> and <code>_G.print()</code> statements to <code>io.stdout</code>. Running your editor
+ from a terminal is the easiest way to see errors as they occur.</p>
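+
+ <p>For example, a temporary <code>lpeg.P</code> function pattern can print diagnostics
+ while you develop a rule (the rule shown is illustrative; remove the tracing
+ once the rule works):</p>
+
+ <pre><code>
+ local trace = lpeg.P(function(input, index)
+   print('keyword rule attempted at position', index)
+   return index
+ end)
+ lex:add_rule('keyword', trace * token(lexer.KEYWORD, word_match[[foo bar baz]]))
+ </code></pre>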
+
+ <p><a id="lexer.Risks"></a></p>
+
+ <h4>Risks</h4>
+
+ <p>Poorly written lexers can crash Scintilla (and thus its containing
+ application), so unsaved data might be lost. However, I have only
+ observed these crashes in early lexer development, when syntax errors or
+ pattern errors are present. Once the lexer actually starts styling text
+ (either correctly or incorrectly, it does not matter), I have not observed
+ any crashes.</p>
+
+ <p><a id="lexer.Acknowledgements"></a></p>
+
+ <h4>Acknowledgements</h4>
+
+ <p>Thanks to Peter Odding for his <a href="http://lua-users.org/lists/lua-l/2007-04/msg00116.html">lexer post</a> on the Lua mailing list
+ that inspired me, and thanks to Roberto Ierusalimschy for LPeg.</p>
+
+ <h2>Lua <code>lexer</code> module API fields</h2>
+
+ <p><a id="lexer.CLASS"></a></p>
+
+ <h3><code>lexer.CLASS</code> (string)</h3>
+
+ <p>The token name for class tokens.</p>
+
+ <p><a id="lexer.COMMENT"></a></p>
+
+ <h3><code>lexer.COMMENT</code> (string)</h3>
+
+ <p>The token name for comment tokens.</p>
+
+ <p><a id="lexer.CONSTANT"></a></p>
+
+ <h3><code>lexer.CONSTANT</code> (string)</h3>
+
+ <p>The token name for constant tokens.</p>
+
+ <p><a id="lexer.DEFAULT"></a></p>
+
+ <h3><code>lexer.DEFAULT</code> (string)</h3>
+
+ <p>The token name for default tokens.</p>
+
+ <p><a id="lexer.ERROR"></a></p>
+
+ <h3><code>lexer.ERROR</code> (string)</h3>
+
+ <p>The token name for error tokens.</p>
+
+ <p><a id="lexer.FOLD_BASE"></a></p>
+
+ <h3><code>lexer.FOLD_BASE</code> (number)</h3>
+
+ <p>The initial (root) fold level.</p>
+
+ <p><a id="lexer.FOLD_BLANK"></a></p>
+
+ <h3><code>lexer.FOLD_BLANK</code> (number)</h3>
+
+ <p>Flag indicating that the line is blank.</p>
+
+ <p><a id="lexer.FOLD_HEADER"></a></p>
+
+ <h3><code>lexer.FOLD_HEADER</code> (number)</h3>
+
+ <p>Flag indicating the line is a fold point.</p>
+
+ <p><a id="lexer.FUNCTION"></a></p>
+
+ <h3><code>lexer.FUNCTION</code> (string)</h3>
+
+ <p>The token name for function tokens.</p>
+
+ <p><a id="lexer.IDENTIFIER"></a></p>
+
+ <h3><code>lexer.IDENTIFIER</code> (string)</h3>
+
+ <p>The token name for identifier tokens.</p>
+
+ <p><a id="lexer.KEYWORD"></a></p>
+
+ <h3><code>lexer.KEYWORD</code> (string)</h3>
+
+ <p>The token name for keyword tokens.</p>
+
+ <p><a id="lexer.LABEL"></a></p>
+
+ <h3><code>lexer.LABEL</code> (string)</h3>
+
+ <p>The token name for label tokens.</p>
+
+ <p><a id="lexer.NUMBER"></a></p>
+
+ <h3><code>lexer.NUMBER</code> (string)</h3>
+
+ <p>The token name for number tokens.</p>
+
+ <p><a id="lexer.OPERATOR"></a></p>
+
+ <h3><code>lexer.OPERATOR</code> (string)</h3>
+
+ <p>The token name for operator tokens.</p>
+
+ <p><a id="lexer.PREPROCESSOR"></a></p>
+
+ <h3><code>lexer.PREPROCESSOR</code> (string)</h3>
+
+ <p>The token name for preprocessor tokens.</p>
+
+ <p><a id="lexer.REGEX"></a></p>
+
+ <h3><code>lexer.REGEX</code> (string)</h3>
+
+ <p>The token name for regex tokens.</p>
+
+ <p><a id="lexer.STRING"></a></p>
+
+ <h3><code>lexer.STRING</code> (string)</h3>
+
+ <p>The token name for string tokens.</p>
+
+ <p><a id="lexer.STYLE_BRACEBAD"></a></p>
+
+ <h3><code>lexer.STYLE_BRACEBAD</code> (string)</h3>
+
+ <p>The style used for unmatched brace characters.</p>
+
+ <p><a id="lexer.STYLE_BRACELIGHT"></a></p>
+
+ <h3><code>lexer.STYLE_BRACELIGHT</code> (string)</h3>
+
+ <p>The style used for highlighted brace characters.</p>
+
+ <p><a id="lexer.STYLE_CALLTIP"></a></p>
+
+ <h3><code>lexer.STYLE_CALLTIP</code> (string)</h3>
+
+ <p>The style used by call tips if <a href="#buffer.call_tip_use_style"><code>buffer.call_tip_use_style</code></a> is set.
+ Only the font name, size, and color attributes are used.</p>
+
+ <p><a id="lexer.STYLE_CLASS"></a></p>
+
+ <h3><code>lexer.STYLE_CLASS</code> (string)</h3>
+
+ <p>The style typically used for class definitions.</p>
+
+ <p><a id="lexer.STYLE_COMMENT"></a></p>
+
+ <h3><code>lexer.STYLE_COMMENT</code> (string)</h3>
+
+ <p>The style typically used for code comments.</p>
+
+ <p><a id="lexer.STYLE_CONSTANT"></a></p>
+
+ <h3><code>lexer.STYLE_CONSTANT</code> (string)</h3>
+
+ <p>The style typically used for constants.</p>
+
+ <p><a id="lexer.STYLE_CONTROLCHAR"></a></p>
+
+ <h3><code>lexer.STYLE_CONTROLCHAR</code> (string)</h3>
+
+ <p>The style used for control characters.
+ Color attributes are ignored.</p>
+
+ <p><a id="lexer.STYLE_DEFAULT"></a></p>
+
+ <h3><code>lexer.STYLE_DEFAULT</code> (string)</h3>
+
+ <p>The style that all other styles are based on.</p>
+
+ <p><a id="lexer.STYLE_EMBEDDED"></a></p>
+
+ <h3><code>lexer.STYLE_EMBEDDED</code> (string)</h3>
+
+ <p>The style typically used for embedded code.</p>
+
+ <p><a id="lexer.STYLE_ERROR"></a></p>
+
+ <h3><code>lexer.STYLE_ERROR</code> (string)</h3>
+
+ <p>The style typically used for erroneous syntax.</p>
+
+ <p><a id="lexer.STYLE_FOLDDISPLAYTEXT"></a></p>
+
+ <h3><code>lexer.STYLE_FOLDDISPLAYTEXT</code> (string)</h3>
+
+ <p>The style used for fold display text.</p>
+
+ <p><a id="lexer.STYLE_FUNCTION"></a></p>
+
+ <h3><code>lexer.STYLE_FUNCTION</code> (string)</h3>
+
+ <p>The style typically used for function definitions.</p>
+
+ <p><a id="lexer.STYLE_IDENTIFIER"></a></p>
+
+ <h3><code>lexer.STYLE_IDENTIFIER</code> (string)</h3>
+
+ <p>The style typically used for identifier words.</p>
+
+ <p><a id="lexer.STYLE_INDENTGUIDE"></a></p>
+
+ <h3><code>lexer.STYLE_INDENTGUIDE</code> (string)</h3>
+
+ <p>The style used for indentation guides.</p>
+
+ <p><a id="lexer.STYLE_KEYWORD"></a></p>
+
+ <h3><code>lexer.STYLE_KEYWORD</code> (string)</h3>
+
+ <p>The style typically used for language keywords.</p>
+
+ <p><a id="lexer.STYLE_LABEL"></a></p>
+
+ <h3><code>lexer.STYLE_LABEL</code> (string)</h3>
+
+ <p>The style typically used for labels.</p>
+
+ <p><a id="lexer.STYLE_LINENUMBER"></a></p>
+
+ <h3><code>lexer.STYLE_LINENUMBER</code> (string)</h3>
+
+ <p>The style used for all margins except fold margins.</p>
+
+ <p><a id="lexer.STYLE_NUMBER"></a></p>
+
+ <h3><code>lexer.STYLE_NUMBER</code> (string)</h3>
+
+ <p>The style typically used for numbers.</p>
+
+ <p><a id="lexer.STYLE_OPERATOR"></a></p>
+
+ <h3><code>lexer.STYLE_OPERATOR</code> (string)</h3>
+
+ <p>The style typically used for operators.</p>
+
+ <p><a id="lexer.STYLE_PREPROCESSOR"></a></p>
+
+ <h3><code>lexer.STYLE_PREPROCESSOR</code> (string)</h3>
+
+ <p>The style typically used for preprocessor statements.</p>
+
+ <p><a id="lexer.STYLE_REGEX"></a></p>
+
+ <h3><code>lexer.STYLE_REGEX</code> (string)</h3>
+
+ <p>The style typically used for regular expression strings.</p>
+
+ <p><a id="lexer.STYLE_STRING"></a></p>
+
+ <h3><code>lexer.STYLE_STRING</code> (string)</h3>
+
+ <p>The style typically used for strings.</p>
+
+ <p><a id="lexer.STYLE_TYPE"></a></p>
+
+ <h3><code>lexer.STYLE_TYPE</code> (string)</h3>
+
+ <p>The style typically used for static types.</p>
+
+ <p><a id="lexer.STYLE_VARIABLE"></a></p>
+
+ <h3><code>lexer.STYLE_VARIABLE</code> (string)</h3>
+
+ <p>The style typically used for variables.</p>
+
+ <p><a id="lexer.STYLE_WHITESPACE"></a></p>
+
+ <h3><code>lexer.STYLE_WHITESPACE</code> (string)</h3>
+
+ <p>The style typically used for whitespace.</p>
+
+ <p><a id="lexer.TYPE"></a></p>
+
+ <h3><code>lexer.TYPE</code> (string)</h3>
+
+ <p>The token name for type tokens.</p>
+
+ <p><a id="lexer.VARIABLE"></a></p>
+
+ <h3><code>lexer.VARIABLE</code> (string)</h3>
+
+ <p>The token name for variable tokens.</p>
+
+ <p><a id="lexer.WHITESPACE"></a></p>
+
+ <h3><code>lexer.WHITESPACE</code> (string)</h3>
+
+ <p>The token name for whitespace tokens.</p>
+
+ <p><a id="lexer.alnum"></a></p>
+
+ <h3><code>lexer.alnum</code> (pattern)</h3>
+
+ <p>A pattern that matches any alphanumeric character ('A'-'Z', 'a'-'z',
+ '0'-'9').</p>
+
+ <p><a id="lexer.alpha"></a></p>
+
+ <h3><code>lexer.alpha</code> (pattern)</h3>
+
+ <p>A pattern that matches any alphabetic character ('A'-'Z', 'a'-'z').</p>
+
+ <p><a id="lexer.any"></a></p>
+
+ <h3><code>lexer.any</code> (pattern)</h3>
+
+ <p>A pattern that matches any single character.</p>
+
+ <p><a id="lexer.ascii"></a></p>
+
+ <h3><code>lexer.ascii</code> (pattern)</h3>
+
+ <p>A pattern that matches any ASCII character (codes 0 to 127).</p>
+
+ <p><a id="lexer.cntrl"></a></p>
+
+ <h3><code>lexer.cntrl</code> (pattern)</h3>
+
+ <p>A pattern that matches any control character (ASCII codes 0 to 31).</p>
+
+ <p><a id="lexer.dec_num"></a></p>
+
+ <h3><code>lexer.dec_num</code> (pattern)</h3>
+
+ <p>A pattern that matches a decimal number.</p>
+
+ <p><a id="lexer.digit"></a></p>
+
+ <h3><code>lexer.digit</code> (pattern)</h3>
+
+ <p>A pattern that matches any digit ('0'-'9').</p>
+
+ <p><a id="lexer.extend"></a></p>
+
+ <h3><code>lexer.extend</code> (pattern)</h3>
+
+ <p>A pattern that matches any ASCII extended character (codes 0 to 255).</p>
+
+ <p><a id="lexer.float"></a></p>
+
+ <h3><code>lexer.float</code> (pattern)</h3>
+
+ <p>A pattern that matches a floating point number.</p>
+
+ <p><a id="lexer.fold_level"></a></p>
+
+ <h3><code>lexer.fold_level</code> (table, Read-only)</h3>
+
+ <p>Table of fold level bit-masks for line numbers starting from zero.
+ Fold level masks are composed of an integer level combined with any of the
+ following bits:</p>
+
+ <ul>
+ <li><code>lexer.FOLD_BASE</code>
+ The initial fold level.</li>
+ <li><code>lexer.FOLD_BLANK</code>
+ The line is blank.</li>
+ <li><code>lexer.FOLD_HEADER</code>
+ The line is a header, or fold point.</li>
+ </ul>
+
+
+ <p><a id="lexer.graph"></a></p>
+
+ <h3><code>lexer.graph</code> (pattern)</h3>
+
+ <p>A pattern that matches any graphical character ('!' to '~').</p>
+
+ <p><a id="lexer.hex_num"></a></p>
+
+ <h3><code>lexer.hex_num</code> (pattern)</h3>
+
+ <p>A pattern that matches a hexadecimal number.</p>
+
+ <p><a id="lexer.indent_amount"></a></p>
+
+ <h3><code>lexer.indent_amount</code> (table, Read-only)</h3>
+
+ <p>Table of indentation amounts in character columns, for line numbers
+ starting from zero.</p>
+
+ <p><a id="lexer.integer"></a></p>
+
+ <h3><code>lexer.integer</code> (pattern)</h3>
+
+ <p>A pattern that matches a decimal, hexadecimal, or octal number.</p>
+
+ <p><a id="lexer.line_state"></a></p>
+
+ <h3><code>lexer.line_state</code> (table)</h3>
+
+ <p>Table of integer line states for line numbers starting from zero.
+ Line states can be used by lexers for keeping track of persistent states.</p>
+
+ <p><a id="lexer.lower"></a></p>
+
+ <h3><code>lexer.lower</code> (pattern)</h3>
+
+ <p>A pattern that matches any lower case character ('a'-'z').</p>
+
+ <p><a id="lexer.newline"></a></p>
+
+ <h3><code>lexer.newline</code> (pattern)</h3>
+
+ <p>A pattern that matches any set of end of line characters.</p>
+
+ <p><a id="lexer.nonnewline"></a></p>
+
+ <h3><code>lexer.nonnewline</code> (pattern)</h3>
+
+ <p>A pattern that matches any single, non-newline character.</p>
+
+ <p><a id="lexer.nonnewline_esc"></a></p>
+
+ <h3><code>lexer.nonnewline_esc</code> (pattern)</h3>
+
+ <p>A pattern that matches any single, non-newline character or any set of end
+ of line characters escaped with '\'.</p>
+
+ <p><a id="lexer.oct_num"></a></p>
+
+ <h3><code>lexer.oct_num</code> (pattern)</h3>
+
+ <p>A pattern that matches an octal number.</p>
+
+ <p><a id="lexer.path"></a></p>
+
+ <h3><code>lexer.path</code> (string)</h3>
+
+ <p>The path used to search for a lexer to load.
+ Identical in format to Lua's <code>package.path</code> string.
+ The default value is <code>package.path</code>.</p>
+
+ <p><a id="lexer.print"></a></p>
+
+ <h3><code>lexer.print</code> (pattern)</h3>
+
+ <p>A pattern that matches any printable character (' ' to '~').</p>
+
+ <p><a id="lexer.property"></a></p>
+
+ <h3><code>lexer.property</code> (table)</h3>
+
+ <p>Map of key-value string pairs.</p>
+
+ <p><a id="lexer.property_expanded"></a></p>
+
+ <h3><code>lexer.property_expanded</code> (table, Read-only)</h3>
+
+ <p>Map of key-value string pairs with <code>$()</code> and <code>%()</code> variable replacement
+ performed in values.</p>
+
+ <p><a id="lexer.property_int"></a></p>
+
+ <h3><code>lexer.property_int</code> (table, Read-only)</h3>
+
+ <p>Map of key-value pairs with values interpreted as numbers, or <code>0</code> if not
+ found.</p>
+
+ <p><a id="lexer.punct"></a></p>
+
+ <h3><code>lexer.punct</code> (pattern)</h3>
+
+ <p>A pattern that matches any punctuation character ('!' to '/', ':' to '@',
+ '[' to '`', '{' to '~').</p>
+
+ <p><a id="lexer.space"></a></p>
+
+ <h3><code>lexer.space</code> (pattern)</h3>
+
+ <p>A pattern that matches any whitespace character ('\t', '\v', '\f', '\n',
+ '\r', space).</p>
+
+ <p><a id="lexer.style_at"></a></p>
+
+ <h3><code>lexer.style_at</code> (table, Read-only)</h3>
+
+ <p>Table of style names at positions in the buffer starting from 1.</p>
+
+ <p><a id="lexer.upper"></a></p>
+
+ <h3><code>lexer.upper</code> (pattern)</h3>
+
+ <p>A pattern that matches any upper case character ('A'-'Z').</p>
+
+ <p><a id="lexer.word"></a></p>
+
+ <h3><code>lexer.word</code> (pattern)</h3>
+
+ <p>A pattern that matches a typical word. Words begin with a letter or
+ underscore and consist of alphanumeric and underscore characters.</p>
+
+ <p><a id="lexer.xdigit"></a></p>
+
+ <h3><code>lexer.xdigit</code> (pattern)</h3>
+
+ <p>A pattern that matches any hexadecimal digit ('0'-'9', 'A'-'F', 'a'-'f').</p>
+
+ <h2>Lua <code>lexer</code> module API functions</h2>
+
+ <p><a id="lexer.add_fold_point"></a></p>
+
+ <h3><code>lexer.add_fold_point</code> (lexer, token_name, start_symbol, end_symbol)</h3>
+
+ <p>Adds to lexer <em>lexer</em> a fold point whose beginning and end tokens are string
+ <em>token_name</em> tokens with string content <em>start_symbol</em> and <em>end_symbol</em>,
+ respectively.
+ In the event that <em>start_symbol</em> may or may not be a fold point depending on
+ context, and that additional processing is required, <em>end_symbol</em> may be a
+ function that ultimately returns <code>1</code> (indicating a beginning fold point),
+ <code>-1</code> (indicating an ending fold point), or <code>0</code> (indicating no fold point).
+ That function is passed the following arguments:</p>
+
+ <ul>
+ <li><code>text</code>: The text being processed for fold points.</li>
+ <li><code>pos</code>: The position in <em>text</em> of the beginning of the line currently
+ being processed.</li>
+ <li><code>line</code>: The text of the line currently being processed.</li>
+ <li><code>s</code>: The position of <em>start_symbol</em> in <em>line</em>.</li>
+ <li><code>symbol</code>: <em>start_symbol</em> itself.</li>
+ </ul>
+
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>lexer</code>: The lexer to add a fold point to.</li>
+ <li><code>token_name</code>: The token name of text that indicates a fold point.</li>
+ <li><code>start_symbol</code>: The text that indicates the beginning of a fold point.</li>
+ <li><code>end_symbol</code>: Either the text that indicates the end of a fold point, or
+ a function that returns whether or not <em>start_symbol</em> is a beginning fold
+ point (1), an ending fold point (-1), or not a fold point at all (0).</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>lex:add_fold_point(lexer.OPERATOR, '{', '}')</code></li>
+ <li><code>lex:add_fold_point(lexer.KEYWORD, 'if', 'end')</code></li>
+ <li><code>lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))</code></li>
+ <li><code>lex:add_fold_point('custom', function(text, pos, line, s, symbol)
+ ... end)</code></li>
+ </ul>
+
+
+ <p><a id="lexer.add_rule"></a></p>
+
+ <h3><code>lexer.add_rule</code> (lexer, id, rule)</h3>
+
+ <p>Adds pattern <em>rule</em> identified by string <em>id</em> to the ordered list of rules
+ for lexer <em>lexer</em>.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>lexer</code>: The lexer to add the given rule to.</li>
+ <li><code>id</code>: The id associated with this rule. It does not have to be the same
+ as the name passed to <code>token()</code>.</li>
+ <li><code>rule</code>: The LPeg pattern of the rule.</li>
+ </ul>
+
+
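+ <p>Usage (illustrative):</p>
+
+ <ul>
+ <li><code>lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))</code></li>
+ <li><code>lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[foo bar baz]]))</code></li>
+ </ul>
+
+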
+ <p>See also:</p>
+
+ <ul>
+ <li><a href="#lexer.modify_rule"><code>lexer.modify_rule</code></a></li>
+ </ul>
+
+
+ <p><a id="lexer.add_style"></a></p>
+
+ <h3><code>lexer.add_style</code> (lexer, token_name, style)</h3>
+
+ <p>Associates string <em>token_name</em> in lexer <em>lexer</em> with Scintilla style string
+ <em>style</em>.
+ Style strings are comma-separated property settings. Available property
+ settings are:</p>
+
+ <ul>
+ <li><code>font:name</code>: Font name.</li>
+ <li><code>size:int</code>: Font size.</li>
+ <li><code>bold</code> or <code>notbold</code>: Whether or not the font face is bold.</li>
+ <li><code>weight:int</code>: Font weight (between 1 and 999).</li>
+ <li><code>italics</code> or <code>notitalics</code>: Whether or not the font face is italic.</li>
+ <li><code>underlined</code> or <code>notunderlined</code>: Whether or not the font face is
+ underlined.</li>
+ <li><code>fore:color</code>: Font face foreground color in "#RRGGBB" or 0xBBGGRR format.</li>
+ <li><code>back:color</code>: Font face background color in "#RRGGBB" or 0xBBGGRR format.</li>
+ <li><code>eolfilled</code> or <code>noteolfilled</code>: Whether or not the background color
+ extends to the end of the line.</li>
+ <li><code>case:char</code>: Font case ('u' for uppercase, 'l' for lowercase, and 'm' for
+ mixed case).</li>
+ <li><code>visible</code> or <code>notvisible</code>: Whether or not the text is visible.</li>
+ <li><code>changeable</code> or <code>notchangeable</code>: Whether or not the text is changeable or
+ read-only.</li>
+ </ul>
+
+
+ <p>Property settings may also contain "$(property.name)" expansions for
+ properties defined in Scintilla, theme files, etc.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>lexer</code>: The lexer to add a style to.</li>
+ <li><code>token_name</code>: The name of the token to associate with the style.</li>
+ <li><code>style</code>: A style string for Scintilla.</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>lex:add_style('longstring', lexer.STYLE_STRING)</code></li>
+ <li><code>lex:add_style('deprecated_function', lexer.STYLE_FUNCTION..',italics')</code></li>
+ <li><code>lex:add_style('visible_ws',
+ lexer.STYLE_WHITESPACE..',back:$(color.grey)')</code></li>
+ </ul>
+
+
+ <p><a id="lexer.delimited_range"></a></p>
+
+ <h3><code>lexer.delimited_range</code> (chars, single_line, no_escape, balanced)</h3>
+
+ <p>Creates and returns a pattern that matches a range of text bounded by
+ <em>chars</em> characters.
+ This is a convenience function for matching more complicated delimited ranges
+ like strings with escape characters and balanced parentheses. <em>single_line</em>
+ indicates whether or not the range must be on a single line, <em>no_escape</em>
+ indicates whether or not to ignore '\' as an escape character, and <em>balanced</em>
+ indicates whether or not to handle balanced ranges like parentheses and
+ requires <em>chars</em> to be composed of two characters.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>chars</code>: The character(s) that bound the matched range.</li>
+ <li><code>single_line</code>: Optional flag indicating whether or not the range must be
+ on a single line.</li>
+ <li><code>no_escape</code>: Optional flag indicating whether or not the range end
+ character may be escaped by a '\' character.</li>
+ <li><code>balanced</code>: Optional flag indicating whether or not to match a balanced
+ range, like the "%b" Lua pattern. This flag only applies if <em>chars</em>
+ consists of two different characters (e.g. "()").</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>local dq_str_escapes = lexer.delimited_range('"')</code></li>
+ <li><code>local dq_str_noescapes = lexer.delimited_range('"', false, true)</code></li>
+ <li><code>local unbalanced_parens = lexer.delimited_range('()')</code></li>
+ <li><code>local balanced_parens = lexer.delimited_range('()', false, false,
+ true)</code></li>
+ </ul>
+
+
+ <p>Return:</p>
+
+ <ul>
+ <li>pattern</li>
+ </ul>
+
+
+ <p>See also:</p>
+
+ <ul>
+ <li><a href="#lexer.nested_pair"><code>lexer.nested_pair</code></a></li>
+ </ul>
+
+
+ <p><a id="lexer.embed"></a></p>
+
+ <h3><code>lexer.embed</code> (lexer, child, start_rule, end_rule)</h3>
+
+ <p>Embeds child lexer <em>child</em> in parent lexer <em>lexer</em> using patterns
+ <em>start_rule</em> and <em>end_rule</em>, which signal the beginning and end of the
+ embedded lexer, respectively.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>lexer</code>: The parent lexer.</li>
+ <li><code>child</code>: The child lexer.</li>
+ <li><code>start_rule</code>: The pattern that signals the beginning of the embedded
+ lexer.</li>
+ <li><code>end_rule</code>: The pattern that signals the end of the embedded lexer.</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>html:embed(css, css_start_rule, css_end_rule)</code></li>
+ <li><code>html:embed(lex, php_start_rule, php_end_rule) -- from php lexer</code></li>
+ </ul>
+
+
+ <p><a id="lexer.fold"></a></p>
+
+ <h3><code>lexer.fold</code> (lexer, text, start_pos, start_line, start_level)</h3>
+
+ <p>Determines fold points in a chunk of text <em>text</em> using lexer <em>lexer</em>,
+ returning a table of fold levels associated with line numbers.
+ <em>text</em> starts at position <em>start_pos</em> on line number <em>start_line</em> with a
+ beginning fold level of <em>start_level</em> in the buffer.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>lexer</code>: The lexer to fold text with.</li>
+ <li><code>text</code>: The text in the buffer to fold.</li>
+ <li><code>start_pos</code>: The position in the buffer <em>text</em> starts at, starting at
+ zero.</li>
+ <li><code>start_line</code>: The line number <em>text</em> starts on.</li>
+ <li><code>start_level</code>: The fold level <em>text</em> starts on.</li>
+ </ul>
+
+
+ <p>Return:</p>
+
+ <ul>
+ <li>table of fold levels associated with line numbers.</li>
+ </ul>
+
+
+ <p><a id="lexer.fold_line_comments"></a></p>
+
+ <h3><code>lexer.fold_line_comments</code> (prefix)</h3>
+
+ <p>Returns a fold function (to be passed to <code>lexer.add_fold_point()</code>) that folds
+ consecutive line comments that start with string <em>prefix</em>.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>prefix</code>: The prefix string defining a line comment.</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>lex:add_fold_point(lexer.COMMENT, '--',
+ lexer.fold_line_comments('--'))</code></li>
+ <li><code>lex:add_fold_point(lexer.COMMENT, '//',
+ lexer.fold_line_comments('//'))</code></li>
+ </ul>
+
+
+ <p><a id="lexer.get_rule"></a></p>
+
+ <h3><code>lexer.get_rule</code> (lexer, id)</h3>
+
+ <p>Returns the rule identified by string <em>id</em>.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>lexer</code>: The lexer to fetch a rule from.</li>
+ <li><code>id</code>: The id of the rule to fetch.</li>
+ </ul>
+
+
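+ <p>Usage (illustrative):</p>
+
+ <ul>
+ <li><code>local keyword_rule = lex:get_rule('keyword')</code></li>
+ </ul>
+
+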
+ <p>Return:</p>
+
+ <ul>
+ <li>pattern</li>
+ </ul>
+
+
+ <p><a id="lexer.last_char_includes"></a></p>
+
+ <h3><code>lexer.last_char_includes</code> (s)</h3>
+
+ <p>Creates and returns a pattern that verifies that string set <em>s</em> contains the
+ first non-whitespace character behind the current match position.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>s</code>: String character set like one passed to <code>lpeg.S()</code>.</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>local regex = lexer.last_char_includes('+-*!%^&amp;|=,([{') *
+ lexer.delimited_range('/')</code></li>
+ </ul>
+
+
+ <p>Return:</p>
+
+ <ul>
+ <li>pattern</li>
+ </ul>
+
+
+ <p><a id="lexer.lex"></a></p>
+
+ <h3><code>lexer.lex</code> (lexer, text, init_style)</h3>
+
+ <p>Lexes a chunk of text <em>text</em> (that has an initial style number of
+ <em>init_style</em>) using lexer <em>lexer</em>, returning a table of token names and
+ positions.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>lexer</code>: The lexer to lex text with.</li>
+ <li><code>text</code>: The text in the buffer to lex.</li>
+ <li><code>init_style</code>: The current style. Multiple-language lexers use this to
+ determine which language to start lexing in.</li>
+ </ul>
+
+
+ <p>Return:</p>
+
+ <ul>
+ <li>table of token names and positions.</li>
+ </ul>
+
+
+ <p><a id="lexer.line_from_position"></a></p>
+
+ <h3><code>lexer.line_from_position</code> (pos)</h3>
+
+ <p>Returns the line number of the line that contains position <em>pos</em>, which
+ starts from 1.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>pos</code>: The position to get the line number of.</li>
+ </ul>
+
+
+ <p>Return:</p>
+
+ <ul>
+ <li>number</li>
+ </ul>
+
+
+ <p><a id="lexer.load"></a></p>
+
+ <h3><code>lexer.load</code> (name, alt_name, cache)</h3>
+
+ <p>Initializes or loads and returns the lexer of string name <em>name</em>.
+ Scintilla calls this function in order to load a lexer. Parent lexers also
+ call this function in order to load child lexers and vice-versa. The user
+ calls this function in order to load a lexer when using this module as a Lua
+ library.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>name</code>: The name of the lexing language.</li>
+ <li><code>alt_name</code>: The alternate name of the lexing language. This is useful for
+ embedding the same child lexer with multiple sets of start and end tokens.</li>
+ <li><code>cache</code>: Flag indicating whether or not to load lexers from the cache.
+ This should only be <code>true</code> when initially loading a lexer (e.g. not from
+ within another lexer for embedding purposes).
+ The default value is <code>false</code>.</li>
+ </ul>
+
+
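+ <p>Usage (illustrative):</p>
+
+ <ul>
+ <li><code>local css = lexer.load('css')</code></li>
+ </ul>
+
+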
+ <p>Return:</p>
+
+ <ul>
+ <li>lexer object</li>
+ </ul>
+
+
+ <p><a id="lexer.modify_rule"></a></p>
+
+ <h3><code>lexer.modify_rule</code> (lexer, id, rule)</h3>
+
+ <p>Replaces in lexer <em>lexer</em> the existing rule identified by string <em>id</em> with
+ pattern <em>rule</em>.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>lexer</code>: The lexer to modify.</li>
+ <li><code>id</code>: The id associated with this rule.</li>
+ <li><code>rule</code>: The LPeg pattern of the rule.</li>
+ </ul>
+
+
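+ <p>Usage (illustrative; assumes an existing 'keyword' rule):</p>
+
+ <ul>
+ <li><code>lex:modify_rule('keyword', token(lexer.KEYWORD, word_match[[foo bar baz quux]]))</code></li>
+ </ul>
+
+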
+ <p><a id="lexer.nested_pair"></a></p>
+
+ <h3><code>lexer.nested_pair</code> (start_chars, end_chars)</h3>
+
+ <p>Returns a pattern that matches a balanced range of text that starts with
+ string <em>start_chars</em> and ends with string <em>end_chars</em>.
+ With single-character delimiters, this function is identical to
+ <code>delimited_range(start_chars..end_chars, false, true, true)</code>.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>start_chars</code>: The string starting a nested sequence.</li>
+ <li><code>end_chars</code>: The string ending a nested sequence.</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>local nested_comment = lexer.nested_pair('/*', '*/')</code></li>
+ </ul>
+
+
+ <p>Return:</p>
+
+ <ul>
+ <li>pattern</li>
+ </ul>
+
+
+ <p>See also:</p>
+
+ <ul>
+ <li><a href="#lexer.delimited_range"><code>lexer.delimited_range</code></a></li>
+ </ul>
+
+
+ <p><a id="lexer.new"></a></p>
+
+ <h3><code>lexer.new</code> (name, opts)</h3>
+
+ <p>Creates and returns a new lexer with the given name.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>name</code>: The lexer's name.</li>
+ <li><code>opts</code>: Table of lexer options. Options currently supported:
+
+ <ul>
+ <li><code>lex_by_line</code>: Whether or not the lexer only processes whole lines of
+ text (instead of arbitrary chunks of text) at a time.
+ Line lexers cannot look ahead to subsequent lines.
+ The default value is <code>false</code>.</li>
+ <li><code>fold_by_indentation</code>: Whether or not to calculate fold points from
+ changes in line indentation instead of from fold points defined by the lexer.
+ The default value is <code>false</code>.</li>
+ <li><code>case_insensitive_fold_points</code>: Whether or not fold points added via
+ <code>lexer.add_fold_point()</code> ignore case.
+ The default value is <code>false</code>.</li>
+ <li><code>inherit</code>: Lexer to inherit from.
+ The default value is <code>nil</code>.</li>
+ </ul>
+ </li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>lexer.new('rhtml', {inherit = lexer.load('html')})</code></li>
+ </ul>
+
+
+ <p><a id="lexer.starts_line"></a></p>
+
+ <h3><code>lexer.starts_line</code> (patt)</h3>
+
+ <p>Creates and returns a pattern that matches pattern <em>patt</em> only at the
+ beginning of a line.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>patt</code>: The LPeg pattern to match on the beginning of a line.</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>local preproc = token(lexer.PREPROCESSOR, lexer.starts_line('#') *
+ lexer.nonnewline^0)</code></li>
+ </ul>
+
+
+ <p>Return:</p>
+
+ <ul>
+ <li>pattern</li>
+ </ul>
+
+
+ <p><a id="lexer.token"></a></p>
+
+ <h3><code>lexer.token</code> (name, patt)</h3>
+
+ <p>Creates and returns a token pattern with token name <em>name</em> and pattern
+ <em>patt</em>.
+ If <em>name</em> is not a predefined token name, its style must be defined via
+ <code>lexer.add_style()</code>.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>name</code>: The name of the token. If this name is not a predefined token
+ name, then a style needs to be associated with it via <code>lexer.add_style()</code>.</li>
+ <li><code>patt</code>: The LPeg pattern associated with the token.</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>local ws = token(lexer.WHITESPACE, lexer.space^1)</code></li>
+ <li><code>local annotation = token('annotation', '@' * lexer.word)</code></li>
+ </ul>
+
+
+ <p>Return:</p>
+
+ <ul>
+ <li>pattern</li>
+ </ul>
+
+
+ <p><a id="lexer.word_match"></a></p>
+
+ <h3><code>lexer.word_match</code> (words, case_insensitive, word_chars)</h3>
+
+ <p>Creates and returns a pattern that matches any single word in string <em>words</em>.
+ <em>case_insensitive</em> indicates whether or not to ignore case when matching
+ words.
+ This is a convenience function for simplifying a set of ordered choice word
+ patterns.
+ If <em>words</em> is a multi-line string, it may contain Lua line comments (<code>--</code>)
+ that will ultimately be ignored.</p>
+
+ <p>Fields:</p>
+
+ <ul>
+ <li><code>words</code>: A string list of words separated by spaces.</li>
+ <li><code>case_insensitive</code>: Optional boolean flag indicating whether or not the
+ word match is case-insensitive. The default value is <code>false</code>.</li>
+ <li><code>word_chars</code>: Unused legacy parameter.</li>
+ </ul>
+
+
+ <p>Usage:</p>
+
+ <ul>
+ <li><code>local keyword = token(lexer.KEYWORD, word_match[[foo bar baz]])</code></li>
+ <li><code>local keyword = token(lexer.KEYWORD, word_match([[foo-bar foo-baz
+ bar-foo bar-baz baz-foo baz-bar]], true))</code></li>
+ </ul>
+
+
+ <p>Return:</p>
+
+ <ul>
+ <li>pattern</li>
+ </ul>
+
+ <h2 id="LexerList">Supported Languages</h2>
+
+ <p>Scintilla has Lua lexers for all of the languages below. Languages
+ denoted by a <code>*</code> have native
+ <a href="#lexer.Code.Folding">folders</a>. For languages without
+ native folding support, folding based on indentation can be used if
+ <code>fold.by.indentation</code> is enabled.</p>
+
+ <ol>
+ <li>Actionscript<code>*</code></li>
+ <li>Ada</li>
+ <li>ANTLR<code>*</code></li>
+ <li>APDL<code>*</code></li>
+ <li>APL</li>
+ <li>Applescript</li>
+ <li>ASM<code>*</code> (NASM)</li>
+ <li>ASP<code>*</code></li>
+ <li>AutoIt</li>
+ <li>AWK<code>*</code></li>
+ <li>Batch<code>*</code></li>
+ <li>BibTeX<code>*</code></li>
+ <li>Boo</li>
+ <li>C<code>*</code></li>
+ <li>C++<code>*</code></li>
+ <li>C#<code>*</code></li>
+ <li>ChucK</li>
+ <li>CMake<code>*</code></li>
+ <li>Coffeescript</li>
+ <li>ConTeXt<code>*</code></li>
+ <li>CSS<code>*</code></li>
+ <li>CUDA<code>*</code></li>
+ <li>D<code>*</code></li>
+ <li>Dart<code>*</code></li>
+ <li>Desktop Entry</li>
+ <li>Diff</li>
+ <li>Django<code>*</code></li>
+ <li>Dockerfile</li>
+ <li>Dot<code>*</code></li>
+ <li>Eiffel<code>*</code></li>
+ <li>Elixir</li>
+ <li>Erlang<code>*</code></li>
+ <li>F#</li>
+ <li>Faust</li>
+ <li>Fish<code>*</code></li>
+ <li>Forth</li>
+ <li>Fortran</li>
+ <li>GAP<code>*</code></li>
+ <li>gettext</li>
+ <li>Gherkin</li>
+ <li>GLSL<code>*</code></li>
+ <li>Gnuplot</li>
+ <li>Go<code>*</code></li>
+ <li>Groovy<code>*</code></li>
+ <li>Gtkrc<code>*</code></li>
+ <li>Haskell</li>
+ <li>HTML<code>*</code></li>
+ <li>Icon<code>*</code></li>
+ <li>IDL</li>
+ <li>Inform</li>
+ <li>ini</li>
+ <li>Io<code>*</code></li>
+ <li>Java<code>*</code></li>
+ <li>Javascript<code>*</code></li>
+ <li>JSON<code>*</code></li>
+ <li>JSP<code>*</code></li>
+ <li>LaTeX<code>*</code></li>
+ <li>Ledger</li>
+ <li>LESS<code>*</code></li>
+ <li>LilyPond</li>
+ <li>Lisp<code>*</code></li>
+ <li>Literate Coffeescript</li>
+ <li>Logtalk</li>
+ <li>Lua<code>*</code></li>
+ <li>Makefile</li>
+ <li>Man Page</li>
+ <li>Markdown</li>
+ <li>MATLAB<code>*</code></li>
+ <li>MoonScript</li>
+ <li>Myrddin</li>
+ <li>Nemerle<code>*</code></li>
+ <li>Nim</li>
+ <li>NSIS</li>
+ <li>Objective-C<code>*</code></li>
+ <li>OCaml</li>
+ <li>Pascal</li>
+ <li>Perl<code>*</code></li>
+ <li>PHP<code>*</code></li>
+ <li>PICO-8<code>*</code></li>
+ <li>Pike<code>*</code></li>
+ <li>PKGBUILD<code>*</code></li>
+ <li>Postscript</li>
+ <li>PowerShell<code>*</code></li>
+ <li>Prolog</li>
+ <li>Properties</li>
+ <li>Pure</li>
+ <li>Python</li>
+ <li>R</li>
+ <li>rc<code>*</code></li>
+ <li>REBOL<code>*</code></li>
+ <li>Rexx<code>*</code></li>
+ <li>ReStructuredText<code>*</code></li>
+ <li>RHTML<code>*</code></li>
+ <li>Ruby<code>*</code></li>
+ <li>Ruby on Rails<code>*</code></li>
+ <li>Rust<code>*</code></li>
+ <li>Sass<code>*</code></li>
+ <li>Scala<code>*</code></li>
+ <li>Scheme<code>*</code></li>
+ <li>Shell<code>*</code></li>
+ <li>Smalltalk<code>*</code></li>
+ <li>Standard ML</li>
+ <li>SNOBOL4</li>
+ <li>SQL</li>
+ <li>TaskPaper</li>
+ <li>Tcl<code>*</code></li>
+ <li>TeX<code>*</code></li>
+ <li>Texinfo<code>*</code></li>
+ <li>TOML</li>
+ <li>Vala<code>*</code></li>
+ <li>VBScript</li>
+ <li>vCard<code>*</code></li>
+ <li>Verilog<code>*</code></li>
+ <li>VHDL</li>
+ <li>Visual Basic</li>
+ <li>Windows Script File<code>*</code></li>
+ <li>XML<code>*</code></li>
+ <li>Xtend<code>*</code></li>
+ <li>YAML</li>
+ </ol>
+
+ <h2>Code Contributors</h2>
+
+ <ul>
+ <li>Alejandro Baez</li>
+ <li>Alex Saraci</li>
+ <li>Brian Schott</li>
+ <li>Carl Sturtivant</li>
+ <li>Chris Emerson</li>
+ <li>Christian Hesse</li>
+ <li>David B. Lamkins</li>
+ <li>Heck Fy</li>
+ <li>Jason Schindler</li>
+ <li>Jeff Stone</li>
+ <li>Joseph Eib</li>
+ <li>Joshua Krämer</li>
+ <li>Klaus Borges</li>
+ <li>Larry Hynes</li>
+ <li>M Rawash</li>
+ <li>Marc André Tanner</li>
+ <li>Markus F.X.J. Oberhumer</li>
+ <li>Martin Morawetz</li>
+ <li>Michael Forney</li>
+ <li>Michael T. Richter</li>
+ <li>Michel Martens</li>
+ <li>Murray Calavera</li>
+ <li>Neil Hodgson</li>
+ <li>Olivier Guibé</li>
+ <li>Peter Odding</li>
+ <li>Piotr Orzechowski</li>
+ <li>Richard Philips</li>
+ <li>Robert Gieseke</li>
+ <li>Roberto Ierusalimschy</li>
+ <li>S. Gilles</li>
+ <li>Stéphane Rivière</li>
+ <li>Tymur Gubayev</li>
+ <li>Wolfgang Seeberg</li>
+ </ul>
+
+ </body>
+</html>
diff --git a/doc/ScintillaDoc.html b/doc/ScintillaDoc.html
index a7e7eba36..a6a5d0a48 100644
--- a/doc/ScintillaDoc.html
+++ b/doc/ScintillaDoc.html
@@ -136,6 +136,7 @@
How to implement folding</a>.<br />
<a class="jump" href="https://bitbucket.org/StarFire/scintilla-doc/downloads/Scintilla-var'aq-Tutorial.pdf">
Beginner's Guide to lexing and folding</a>.<br />
+ <a class="jump" href="LPegLexer.html">How to write lexers in Lua</a>.<br />
The <a class="jump" href="SciCoding.html">coding style</a> used in Scintilla and SciTE is
worth following if you want to contribute code to Scintilla but is not compulsory.</p>
@@ -151,7 +152,7 @@
<p>The GTK+ version also uses messages in a similar way to the Windows version. This is
different to normal GTK+ practice but made it easier to implement rapidly.</p>
- <p>Scintilla also builds with Cocoa on OS X and with Qt, and follows the conventions of
+ <p>Scintilla also builds with Cocoa on OS X, with Qt, and with curses, and follows the conventions of
those platforms.</p>
<p>Scintilla does not properly support right-to-left languages like Arabic and Hebrew.
diff --git a/gtk/makefile b/gtk/makefile
index a28aa092d..8854a823a 100644
--- a/gtk/makefile
+++ b/gtk/makefile
@@ -79,14 +79,28 @@ CTFLAGS=-DNDEBUG -Os $(CXXBASEFLAGS) $(THREADFLAGS)
endif
CXXTFLAGS:=--std=gnu++0x $(CTFLAGS) $(REFLAGS)
+ifdef LPEG_LEXER
+CXXTFLAGS+=-DLPEG_LEXER -I ../src/lua
+LUA_CFLAGS:=-std=c99 -pedantic -Wall -I ../src/lua -DLUA_USE_POSIX -DLUA_USE_DLOPEN
+LUAOBJS:=lapi.o lcode.o lctype.o ldebug.o ldo.o ldump.o lfunc.o lgc.o linit.o \
+ llex.o lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o \
+ ltable.o ltm.o lundump.o lvm.o lzio.o \
+ lauxlib.o lbaselib.o lbitlib.o lcorolib.o ldblib.o liolib.o \
+ lmathlib.o loadlib.o loslib.o lstrlib.o ltablib.o lutf8lib.o \
+ lpcap.o lpcode.o lpprint.o lptree.o lpvm.o
+endif
CONFIGFLAGS:=$(shell pkg-config --cflags $(GTKVERSION))
MARSHALLER=scintilla-marshal.o
+all: $(COMPLIB)
+
.cxx.o:
$(CXX) $(CONFIGFLAGS) $(CXXTFLAGS) $(CXXFLAGS) -c $<
-.c.o:
+$(MARSHALLER): scintilla-marshal.c
$(CC) $(CONFIGFLAGS) $(CTFLAGS) $(CFLAGS) -w -c $<
+$(LUAOBJS): %.o: ../lua/src/%.c
+ $(CC) $(LUA_CFLAGS) $(CFLAGS) -c $<
GLIB_GENMARSHAL = glib-genmarshal
GLIB_GENMARSHAL_FLAGS = --prefix=scintilla_marshal
@@ -98,8 +112,6 @@ GLIB_GENMARSHAL_FLAGS = --prefix=scintilla_marshal
LEXOBJS:=$(addsuffix .o,$(basename $(sort $(notdir $(wildcard $(srcdir)/../lexers/Lex*.cxx)))))
-all: $(COMPLIB)
-
clean:
$(DEL) *.o $(COMPLIB) *.plist
@@ -119,7 +131,7 @@ $(COMPLIB): Accessor.o CharacterSet.o LexerBase.o LexerModule.o LexerSimple.o St
PropSetSimple.o PlatGTK.o \
KeyMap.o LineMarker.o PositionCache.o ScintillaGTK.o ScintillaGTKAccessible.o CellBuffer.o CharacterCategory.o ViewStyle.o \
RESearch.o RunStyles.o Selection.o Style.o Indicator.o AutoComplete.o UniConversion.o XPM.o \
- $(MARSHALLER) $(LEXOBJS)
+ $(MARSHALLER) $(LEXOBJS) $(LUAOBJS)
$(AR) $(ARFLAGS) $@ $^
$(RANLIB) $@
diff --git a/include/SciLexer.h b/include/SciLexer.h
index cc5139e93..70fdb97f8 100644
--- a/include/SciLexer.h
+++ b/include/SciLexer.h
@@ -135,6 +135,7 @@
#define SCLEX_JSON 120
#define SCLEX_EDIFACT 121
#define SCLEX_INDENT 122
+#define SCLEX_LPEG 999
#define SCLEX_AUTOMATIC 1000
#define SCE_P_DEFAULT 0
#define SCE_P_COMMENTLINE 1
diff --git a/include/Scintilla.iface b/include/Scintilla.iface
index f35206990..6e1740682 100644
--- a/include/Scintilla.iface
+++ b/include/Scintilla.iface
@@ -2917,6 +2917,7 @@ val SCLEX_TEHEX=119
val SCLEX_JSON=120
val SCLEX_EDIFACT=121
val SCLEX_INDENT=122
+val SCLEX_LPEG=999
# When a lexer specifies its language as SCLEX_AUTOMATIC it receives a
# value assigned in sequence from SCLEX_AUTOMATIC+1.
diff --git a/lexers/LexLPeg.cxx b/lexers/LexLPeg.cxx
new file mode 100644
index 000000000..9a44a3cdd
--- /dev/null
+++ b/lexers/LexLPeg.cxx
@@ -0,0 +1,795 @@
+/**
+ * Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+ *
+ * Lua-powered dynamic language lexer for Scintilla.
+ *
+ * For documentation on writing lexers, see *../doc/LPegLexer.html*.
+ */
+
+#if LPEG_LEXER
+
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+#include <assert.h>
+#include <ctype.h>
+#if CURSES
+#include <curses.h>
+#endif
+
+#include "ILexer.h"
+#include "Scintilla.h"
+#include "SciLexer.h"
+
+#include "PropSetSimple.h"
+#include "LexAccessor.h"
+#include "LexerModule.h"
+
+extern "C" {
+#include "lua.h"
+#include "lualib.h"
+#include "lauxlib.h"
+LUALIB_API int luaopen_lpeg(lua_State *L);
+}
+
+#if _WIN32
+#define strcasecmp _stricmp
+#endif
+#define streq(s1, s2) (strcasecmp((s1), (s2)) == 0)
+
+#if SCI_NAMESPACE
+using namespace Scintilla;
+#endif
+
+#define l_setmetatable(l, k, mtf) \
+ if (luaL_newmetatable(l, k)) { \
+ lua_pushcfunction(l, mtf), lua_setfield(l, -2, "__index"); \
+ lua_pushcfunction(l, mtf), lua_setfield(l, -2, "__newindex"); \
+ } \
+ lua_setmetatable(l, -2);
+#define l_pushlexerp(l, mtf) do { \
+ lua_newtable(l); \
+ lua_pushvalue(l, 2), lua_setfield(l, -2, "property"); \
+ l_setmetatable(l, "sci_lexerp", mtf); \
+} while(0)
+#define l_getlexerobj(l) \
+ lua_getfield(l, LUA_REGISTRYINDEX, "sci_lexers"); \
+ lua_pushlightuserdata(l, reinterpret_cast<void *>(this)); \
+ lua_gettable(l, -2), lua_replace(l, -2);
+#define l_getlexerfield(l, k) \
+ l_getlexerobj(l); \
+ lua_getfield(l, -1, k), lua_replace(l, -2);
+#if LUA_VERSION_NUM < 502
+#define l_openlib(f, s) \
+ (lua_pushcfunction(L, f), lua_pushstring(L, s), lua_call(L, 1, 0))
+#define LUA_BASELIBNAME ""
+#define lua_rawlen lua_objlen
+#define LUA_OK 0
+#define lua_compare(l, a, b, _) lua_equal(l, a, b)
+#define LUA_OPEQ 0
+#else
+#define l_openlib(f, s) (luaL_requiref(L, s, f, 1), lua_pop(L, 1))
+#define LUA_BASELIBNAME "_G"
+#endif
+#define l_setfunction(l, f, k) (lua_pushcfunction(l, f), lua_setfield(l, -2, k))
+#define l_setconstant(l, c, k) (lua_pushinteger(l, c), lua_setfield(l, -2, k))
+
+#if CURSES
+#define A_COLORCHAR (A_COLOR | A_CHARTEXT)
+#endif
+
+/** The LPeg Scintilla lexer. */
+class LexerLPeg : public ILexer {
+ /**
+ * The lexer's Lua state.
+ * It is cleared each time the lexer language changes unless `own_lua` is
+ * `true`.
+ */
+ lua_State *L;
+ /**
+ * The flag indicating whether or not an existing Lua state was supplied as
+ * the lexer's Lua state.
+ */
+ bool own_lua;
+ /**
+ * The set of properties for the lexer.
+ * The `lexer.name`, `lexer.lpeg.home`, and `lexer.lpeg.color.theme`
+ * properties must be defined before running the lexer.
+ * For use with SciTE, all of the style property strings generated for the
+ * current lexer are placed in here.
+ */
+ PropSetSimple props;
+ /** The function to send Scintilla messages with. */
+ SciFnDirect SS;
+ /** The Scintilla object the lexer belongs to. */
+ sptr_t sci;
+ /**
+ * The flag indicating whether or not the lexer needs to be re-initialized.
+ * Re-initialization is required after the lexer language changes.
+ */
+ bool reinit;
+ /**
+ * The flag indicating whether or not the lexer language has embedded lexers.
+ */
+ bool multilang;
+ /**
+ * The list of style numbers considered to be whitespace styles.
+ * This is used in multi-language lexers when backtracking to whitespace to
+ * determine which lexer grammar to use.
+ */
+ bool ws[STYLE_MAX + 1];
+
+ /**
+ * Logs the given error message or a Lua error message, prints it, and clears
+ * the stack.
+ * Error messages are logged to the "lexer.lpeg.error" property.
+ * @param str The error message to log and print. If `NULL`, logs and prints
+ * the Lua error message at the top of the stack.
+ */
+ static void l_error(lua_State *L, const char *str=NULL) {
+ lua_getfield(L, LUA_REGISTRYINDEX, "sci_props");
+ PropSetSimple *props = static_cast<PropSetSimple *>(lua_touserdata(L, -1));
+ lua_pop(L, 1); // props
+ props->Set("lexer.lpeg.error", str ? str : lua_tostring(L, -1));
+ fprintf(stderr, "Lua Error: %s.\n", str ? str : lua_tostring(L, -1));
+ lua_settop(L, 0);
+ }
+
+ /** The lexer's `line_from_position` Lua function. */
+ static int l_line_from_position(lua_State *L) {
+ lua_getfield(L, LUA_REGISTRYINDEX, "sci_buffer");
+ IDocument *buffer = static_cast<IDocument *>(lua_touserdata(L, -1));
+ lua_pushinteger(L, buffer->LineFromPosition(luaL_checkinteger(L, 1) - 1));
+ return 1;
+ }
+
+ /** The `__index`/`__newindex` metamethod for the lexer's Lua objects. */
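+ // From Lua, the keys handled below read like tables keyed by line or
+ // position (illustrative usage only):
+ //   local level = lexer.fold_level[line_num] -- read-only
+ //   lexer.line_state[line_num] = 1 -- read/write
+ //   local fold = lexer.property['fold'] -- string property lookup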
+ static int llexer_property(lua_State *L) {
+ int newindex = (lua_gettop(L) == 3);
+ luaL_getmetatable(L, "sci_lexer");
+ lua_getmetatable(L, 1); // metatable can be either sci_lexer or sci_lexerp
+ int is_lexer = lua_compare(L, -1, -2, LUA_OPEQ);
+ lua_pop(L, 2); // metatable, metatable
+
+ lua_getfield(L, LUA_REGISTRYINDEX, "sci_buffer");
+ IDocument *buffer = static_cast<IDocument *>(lua_touserdata(L, -1));
+ lua_getfield(L, LUA_REGISTRYINDEX, "sci_props");
+ PropSetSimple *props = static_cast<PropSetSimple *>(lua_touserdata(L, -1));
+ lua_pop(L, 2); // sci_props and sci_buffer
+
+ if (is_lexer)
+ lua_pushvalue(L, 2); // key is given
+ else
+ lua_getfield(L, 1, "property"); // indexible property
+ const char *key = lua_tostring(L, -1);
+ if (strcmp(key, "fold_level") == 0) {
+ luaL_argcheck(L, !newindex, 3, "read-only property");
+ if (is_lexer)
+ l_pushlexerp(L, llexer_property);
+ else
+ lua_pushinteger(L, buffer->GetLevel(luaL_checkinteger(L, 2)));
+ } else if (strcmp(key, "indent_amount") == 0) {
+ luaL_argcheck(L, !newindex, 3, "read-only property");
+ if (is_lexer)
+ l_pushlexerp(L, llexer_property);
+ else
+ lua_pushinteger(L, buffer->GetLineIndentation(luaL_checkinteger(L, 2)));
+ } else if (strcmp(key, "property") == 0) {
+ luaL_argcheck(L, !is_lexer || !newindex, 3, "read-only property");
+ if (is_lexer)
+ l_pushlexerp(L, llexer_property);
+ else if (!newindex)
+ lua_pushstring(L, props->Get(luaL_checkstring(L, 2)));
+ else
+ props->Set(luaL_checkstring(L, 2), luaL_checkstring(L, 3));
+ } else if (strcmp(key, "property_int") == 0) {
+ luaL_argcheck(L, !newindex, 3, "read-only property");
+ if (is_lexer)
+ l_pushlexerp(L, llexer_property);
+ else {
+ lua_pushstring(L, props->Get(luaL_checkstring(L, 2)));
+ lua_pushinteger(L, lua_tointeger(L, -1));
+ }
+ } else if (strcmp(key, "style_at") == 0) {
+ luaL_argcheck(L, !newindex, 3, "read-only property");
+ if (is_lexer)
+ l_pushlexerp(L, llexer_property);
+ else {
+ int style = buffer->StyleAt(luaL_checkinteger(L, 2) - 1);
+ lua_getfield(L, LUA_REGISTRYINDEX, "sci_lexer_obj");
+ lua_getfield(L, -1, "_TOKENSTYLES"), lua_replace(L, -2);
+ lua_pushnil(L);
+ while (lua_next(L, -2)) {
+ if (luaL_checkinteger(L, -1) == style) break;
+ lua_pop(L, 1); // value
+ }
+ lua_pop(L, 1); // style_num
+ }
+ } else if (strcmp(key, "line_state") == 0) {
+ luaL_argcheck(L, !is_lexer || !newindex, 3, "read-only property");
+ if (is_lexer)
+ l_pushlexerp(L, llexer_property);
+ else if (!newindex)
+ lua_pushinteger(L, buffer->GetLineState(luaL_checkinteger(L, 2)));
+ else
+ buffer->SetLineState(luaL_checkinteger(L, 2),
+ luaL_checkinteger(L, 3));
+ } else return !newindex ? (lua_rawget(L, 1), 1) : (lua_rawset(L, 1), 0);
+ return 1;
+ }
+
+ /**
+ * Expands the value of the string property key at index *index* and pushes
+ * the result onto the stack.
+ * @param L The Lua state.
+ * @param index The stack index of the string property key.
+ */
+ void lL_getexpanded(lua_State *L, int index) {
+ lua_getfield(L, LUA_REGISTRYINDEX, "_LOADED"), lua_getfield(L, -1, "lexer");
+ lua_getfield(L, -1, "property_expanded");
+ lua_pushvalue(L, (index > 0) ? index : index - 3), lua_gettable(L, -2);
+ lua_replace(L, -4), lua_pop(L, 2); // property_expanded and lexer module
+ }
+
+ /**
+ * Parses the given style string to set the properties for the given style
+ * number.
+ * @param num The style number to set properties for.
+ * @param style The style string containing properties to set.
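+ * An illustrative style string, using the options parsed below:
+ * "font:Monospace,size:10,bold,italics,fore:#CC0000,back:#FFFFFF,eolfilled".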
+ */
+ void SetStyle(int num, const char *style) {
+ char *style_copy = static_cast<char *>(malloc(strlen(style) + 1));
+ char *option = strcpy(style_copy, style), *next = NULL, *p = NULL;
+ while (option) {
+ if ((next = strchr(option, ','))) *next++ = '\0';
+ if ((p = strchr(option, ':'))) *p++ = '\0';
+ if (streq(option, "font") && p)
+ SS(sci, SCI_STYLESETFONT, num, reinterpret_cast<sptr_t>(p));
+ else if (streq(option, "size") && p)
+ SS(sci, SCI_STYLESETSIZE, num, static_cast<int>(atoi(p)));
+ else if (streq(option, "bold") || streq(option, "notbold") ||
+ streq(option, "weight")) {
+#if !CURSES
+ int weight = SC_WEIGHT_NORMAL;
+ if (*option == 'b')
+ weight = SC_WEIGHT_BOLD;
+ else if (*option == 'w' && p)
+ weight = atoi(p);
+ SS(sci, SCI_STYLESETWEIGHT, num, weight);
+#else
+ // Scintilla curses requires font attributes to be stored in the "font
+ // weight" style attribute.
+ // First, clear any existing SC_WEIGHT_NORMAL, SC_WEIGHT_SEMIBOLD, or
+ // SC_WEIGHT_BOLD values stored in the lower 16 bits. Then set the
+ // appropriate curses attr.
+ sptr_t weight = SS(sci, SCI_STYLEGETWEIGHT, num, 0) & ~A_COLORCHAR;
+ int bold = *option == 'b' ||
+ (*option == 'w' && p && atoi(p) > SC_WEIGHT_NORMAL);
+ SS(sci, SCI_STYLESETWEIGHT, num,
+ bold ? weight | A_BOLD : weight & ~A_BOLD);
+#endif
+ } else if (streq(option, "italics") || streq(option, "notitalics"))
+ SS(sci, SCI_STYLESETITALIC, num, *option == 'i');
+ else if (streq(option, "underlined") || streq(option, "notunderlined")) {
+#if !CURSES
+ SS(sci, SCI_STYLESETUNDERLINE, num, *option == 'u');
+#else
+ // Scintilla curses requires font attributes to be stored in the "font
+ // weight" style attribute.
+ // First, clear any existing SC_WEIGHT_NORMAL, SC_WEIGHT_SEMIBOLD, or
+ // SC_WEIGHT_BOLD values stored in the lower 16 bits. Then set the
+ // appropriate curses attr.
+ sptr_t weight = SS(sci, SCI_STYLEGETWEIGHT, num, 0) & ~A_COLORCHAR;
+ SS(sci, SCI_STYLESETWEIGHT, num,
+ (*option == 'u') ? weight | A_UNDERLINE : weight & ~A_UNDERLINE);
+#endif
+ } else if ((streq(option, "fore") || streq(option, "back")) && p) {
+ int msg = (*option == 'f') ? SCI_STYLESETFORE : SCI_STYLESETBACK;
+ int color = static_cast<int>(strtol(p, NULL, 0));
+ if (*p == '#') { // #RRGGBB format; Scintilla format is 0xBBGGRR
+ color = static_cast<int>(strtol(p + 1, NULL, 16));
+ color = ((color & 0xFF0000) >> 16) | (color & 0xFF00) |
+ ((color & 0xFF) << 16); // convert to 0xBBGGRR
+ }
+ SS(sci, msg, num, color);
+ } else if (streq(option, "eolfilled") || streq(option, "noteolfilled"))
+ SS(sci, SCI_STYLESETEOLFILLED, num, *option == 'e');
+ else if (streq(option, "characterset") && p)
+ SS(sci, SCI_STYLESETCHARACTERSET, num, static_cast<int>(atoi(p)));
+ else if (streq(option, "case") && p) {
+ if (*p == 'u')
+ SS(sci, SCI_STYLESETCASE, num, SC_CASE_UPPER);
+ else if (*p == 'l')
+ SS(sci, SCI_STYLESETCASE, num, SC_CASE_LOWER);
+ } else if (streq(option, "visible") || streq(option, "notvisible"))
+ SS(sci, SCI_STYLESETVISIBLE, num, *option == 'v');
+ else if (streq(option, "changeable") || streq(option, "notchangeable"))
+ SS(sci, SCI_STYLESETCHANGEABLE, num, *option == 'c');
+ else if (streq(option, "hotspot") || streq(option, "nothotspot"))
+ SS(sci, SCI_STYLESETHOTSPOT, num, *option == 'h');
+ option = next;
+ }
+ free(style_copy);
+ }
+
+ /**
+ * Iterates through the lexer's `_TOKENSTYLES`, setting the style properties
+ * for all defined styles, or for SciTE, generates the set of style properties
+ * instead of directly setting style properties.
+ */
+ bool SetStyles() {
+ // If the lexer defines additional styles, set their properties first (if
+ // the user has not already defined them).
+ l_getlexerfield(L, "_EXTRASTYLES");
+ lua_pushnil(L);
+ while (lua_next(L, -2)) {
+ if (lua_isstring(L, -2) && lua_isstring(L, -1)) {
+ lua_pushstring(L, "style."), lua_pushvalue(L, -3), lua_concat(L, 2);
+ if (!*props.Get(lua_tostring(L, -1)))
+ props.Set(lua_tostring(L, -1), lua_tostring(L, -2));
+ lua_pop(L, 1); // style name
+ }
+ lua_pop(L, 1); // value
+ }
+ lua_pop(L, 1); // _EXTRASTYLES
+
+ l_getlexerfield(L, "_TOKENSTYLES");
+ if (!SS || !sci) {
+ lua_pop(L, 1); // _TOKENSTYLES
+ // Skip, but do not report an error since `reinit` would remain `false`
+ // and subsequent calls to `Lex()` and `Fold()` would repeatedly call this
+ // function and error.
+ return true;
+ }
+ lua_pushstring(L, "style.default"), lL_getexpanded(L, -1);
+ SetStyle(STYLE_DEFAULT, lua_tostring(L, -1));
+ lua_pop(L, 2); // style and "style.default"
+ SS(sci, SCI_STYLECLEARALL, 0, 0); // set default styles
+ lua_pushnil(L);
+ while (lua_next(L, -2)) {
+ if (lua_isstring(L, -2) && lua_isnumber(L, -1) &&
+ lua_tointeger(L, -1) != STYLE_DEFAULT) {
+ lua_pushstring(L, "style."), lua_pushvalue(L, -3), lua_concat(L, 2);
+ lL_getexpanded(L, -1), lua_replace(L, -2);
+ SetStyle(lua_tointeger(L, -2), lua_tostring(L, -1));
+ lua_pop(L, 1); // style
+ }
+ lua_pop(L, 1); // value
+ }
+ lua_pop(L, 1); // _TOKENSTYLES
+ return true;
+ }
+
+ /**
+ * Returns the style name for the given style number.
+ * @param style The style number to get the style name for.
+ * @return style name or NULL
+ */
+ const char *GetStyleName(int style) {
+ if (!L) return NULL;
+ const char *name = NULL;
+ l_getlexerfield(L, "_TOKENSTYLES");
+ lua_pushnil(L);
+ while (lua_next(L, -2))
+ if (lua_tointeger(L, -1) == style) {
+ name = lua_tostring(L, -2);
+ lua_pop(L, 2); // value and key
+ break;
+ } else lua_pop(L, 1); // value
+ lua_pop(L, 1); // _TOKENSTYLES
+ return name;
+ }
+
+ /**
+ * Initializes the lexer once the `lexer.lpeg.home` and `lexer.name`
+ * properties are set.
+ */
+ bool Init() {
+ char home[FILENAME_MAX], lexer[50], theme[FILENAME_MAX];
+ props.GetExpanded("lexer.lpeg.home", home);
+ props.GetExpanded("lexer.name", lexer);
+ props.GetExpanded("lexer.lpeg.color.theme", theme);
+ if (!*home || !*lexer || !L) return false;
+
+ lua_pushlightuserdata(L, reinterpret_cast<void *>(&props));
+ lua_setfield(L, LUA_REGISTRYINDEX, "sci_props");
+
+ // If necessary, load the lexer module and theme.
+ lua_getfield(L, LUA_REGISTRYINDEX, "_LOADED"), lua_getfield(L, -1, "lexer");
+ if (lua_isnil(L, -1)) {
+ lua_pop(L, 2); // nil and _LOADED
+
+ // Modify `package.path` to find lexers.
+ lua_getglobal(L, "package"), lua_getfield(L, -1, "path");
+ int orig_path = luaL_ref(L, LUA_REGISTRYINDEX); // restore later
+ lua_pushstring(L, home), lua_pushstring(L, "/?.lua"), lua_concat(L, 2);
+ lua_setfield(L, -2, "path"), lua_pop(L, 1); // package
+
+ // Load the lexer module.
+ lua_getglobal(L, "require");
+ lua_pushstring(L, "lexer");
+ if (lua_pcall(L, 1, 1, 0) != LUA_OK) return (l_error(L), false);
+ l_setfunction(L, l_line_from_position, "line_from_position");
+ l_setconstant(L, SC_FOLDLEVELBASE, "FOLD_BASE");
+ l_setconstant(L, SC_FOLDLEVELWHITEFLAG, "FOLD_BLANK");
+ l_setconstant(L, SC_FOLDLEVELHEADERFLAG, "FOLD_HEADER");
+ l_setmetatable(L, "sci_lexer", llexer_property);
+ if (*theme) {
+ // Load the theme.
+ if (!(strstr(theme, "/") || strstr(theme, "\\"))) { // theme name
+ lua_pushstring(L, home);
+ lua_pushstring(L, "/themes/");
+ lua_pushstring(L, theme);
+ lua_pushstring(L, ".lua");
+ lua_concat(L, 4);
+ } else lua_pushstring(L, theme); // path to theme
+ if (luaL_loadfile(L, lua_tostring(L, -1)) != LUA_OK ||
+ lua_pcall(L, 0, 0, 0) != LUA_OK) return (l_error(L), false);
+ lua_pop(L, 1); // theme
+ }
+
+ // Restore `package.path`.
+ lua_getglobal(L, "package");
+ lua_getfield(L, -1, "path"), lua_setfield(L, -3, "path"); // lexer.path =
+ lua_rawgeti(L, LUA_REGISTRYINDEX, orig_path), lua_setfield(L, -2, "path");
+ luaL_unref(L, LUA_REGISTRYINDEX, orig_path), lua_pop(L, 1); // package
+ } else lua_remove(L, -2); // _LOADED
+
+ // Load the language lexer.
+ lua_getfield(L, -1, "load");
+ if (lua_isfunction(L, -1)) {
+ lua_pushstring(L, lexer), lua_pushnil(L), lua_pushboolean(L, 1);
+ if (lua_pcall(L, 3, 1, 0) != LUA_OK) return (l_error(L), false);
+ } else return (l_error(L, "'lexer.load' function not found"), false);
+ lua_getfield(L, LUA_REGISTRYINDEX, "sci_lexers");
+ lua_pushlightuserdata(L, reinterpret_cast<void *>(this));
+ lua_pushvalue(L, -3), lua_settable(L, -3), lua_pop(L, 1); // sci_lexers
+ lua_pushvalue(L, -1), lua_setfield(L, LUA_REGISTRYINDEX, "sci_lexer_obj");
+ lua_remove(L, -2); // lexer module
+ if (!SetStyles()) return false;
+
+ // If the lexer is a parent, it will have children in its _CHILDREN table.
+ lua_getfield(L, -1, "_CHILDREN");
+ if (lua_istable(L, -1)) {
+ multilang = true;
+ // Determine which styles are language whitespace styles
+ // ([lang]_whitespace). This is necessary for determining which language
+ // to start lexing with.
+ char style_name[50];
+ for (int i = 0; i <= STYLE_MAX; i++) {
+ PrivateCall(i, reinterpret_cast<void *>(style_name));
+ ws[i] = strstr(style_name, "whitespace") ? true : false;
+ }
+ }
+ lua_pop(L, 2); // _CHILDREN and lexer object
+
+ reinit = false;
+ props.Set("lexer.lpeg.error", "");
+ return true;
+ }
+
+ /**
+ * When *lparam* is `0`, returns the length of the given string *str*;
+ * otherwise copies *str* into the buffer pointed to by *lparam* and returns
+ * the number of bytes copied.
+ * @param lparam `0` to get the number of bytes needed to store *str* or a
+ * pointer to a buffer large enough to copy *str* into.
+ * @param str The string to copy.
+ * @return the length of *str*
+ */
+ void *StringResult(long lparam, const char *str) {
+ if (lparam) strcpy(reinterpret_cast<char *>(lparam), str);
+ return reinterpret_cast<void *>(strlen(str));
+ }
+
+public:
+ /** Constructor. */
+ LexerLPeg() : own_lua(true), reinit(true), multilang(false) {
+ // Initialize the Lua state, load libraries, and set platform variables.
+ if ((L = luaL_newstate())) {
+ l_openlib(luaopen_base, LUA_BASELIBNAME);
+ l_openlib(luaopen_table, LUA_TABLIBNAME);
+ l_openlib(luaopen_string, LUA_STRLIBNAME);
+#if LUA_VERSION_NUM < 502
+ l_openlib(luaopen_io, LUA_IOLIBNAME); // for `package.searchpath()`
+#endif
+ l_openlib(luaopen_package, LUA_LOADLIBNAME);
+ l_openlib(luaopen_lpeg, "lpeg");
+#if _WIN32
+ lua_pushboolean(L, 1), lua_setglobal(L, "WIN32");
+#endif
+#if __APPLE__
+ lua_pushboolean(L, 1), lua_setglobal(L, "OSX");
+#endif
+#if GTK
+ lua_pushboolean(L, 1), lua_setglobal(L, "GTK");
+#endif
+#if CURSES
+ lua_pushboolean(L, 1), lua_setglobal(L, "CURSES");
+#endif
+ lua_newtable(L), lua_setfield(L, LUA_REGISTRYINDEX, "sci_lexers");
+ } else fprintf(stderr, "Lua failed to initialize.\n");
+ SS = NULL, sci = 0;
+ }
+
+ /** Destructor. */
+ virtual ~LexerLPeg() {}
+
+ /** Destroys the lexer object. */
+ virtual void SCI_METHOD Release() {
+ if (own_lua && L)
+ lua_close(L);
+ else if (!own_lua) {
+ lua_getfield(L, LUA_REGISTRYINDEX, "sci_lexers");
+ lua_pushlightuserdata(L, reinterpret_cast<void *>(this));
+ lua_pushnil(L), lua_settable(L, -3), lua_pop(L, 1); // sci_lexers
+ }
+ L = NULL;
+ delete this;
+ }
+
+ /**
+ * Lexes the Scintilla document.
+ * @param startPos The position in the document to start lexing at.
+ * @param lengthDoc The number of bytes in the document to lex.
+ * @param initStyle The initial style at position *startPos* in the document.
+ * @param buffer The document interface.
+ */
+ virtual void SCI_METHOD Lex(Sci_PositionU startPos, Sci_Position lengthDoc,
+ int initStyle, IDocument *buffer) {
+ LexAccessor styler(buffer);
+ if ((reinit && !Init()) || !L) {
+ // Style everything in the default style.
+ styler.StartAt(startPos);
+ styler.StartSegment(startPos);
+ styler.ColourTo(startPos + lengthDoc - 1, STYLE_DEFAULT);
+ styler.Flush();
+ return;
+ }
+ lua_pushlightuserdata(L, reinterpret_cast<void *>(&props));
+ lua_setfield(L, LUA_REGISTRYINDEX, "sci_props");
+ lua_pushlightuserdata(L, reinterpret_cast<void *>(buffer));
+ lua_setfield(L, LUA_REGISTRYINDEX, "sci_buffer");
+
+ // Ensure the lexer has a grammar.
+ // This could be done in the lexer module's `lex()`, but for large files,
+ // passing string arguments from C to Lua is expensive.
+ l_getlexerfield(L, "_GRAMMAR");
+ int has_grammar = !lua_isnil(L, -1);
+ lua_pop(L, 1); // _GRAMMAR
+ if (!has_grammar) {
+ // Style everything in the default style.
+ styler.StartAt(startPos);
+ styler.StartSegment(startPos);
+ styler.ColourTo(startPos + lengthDoc - 1, STYLE_DEFAULT);
+ styler.Flush();
+ return;
+ }
+
+ // Start from the beginning of the current style so LPeg matches it.
+ // For multilang lexers, start at whitespace since embedded languages have
+ // [lang]_whitespace styles. This is so LPeg can start matching child
+ // languages instead of parent ones if necessary.
+ if (startPos > 0) {
+ Sci_PositionU i = startPos;
+ while (i > 0 && styler.StyleAt(i - 1) == initStyle) i--;
+ if (multilang)
+ while (i > 0 && !ws[static_cast<size_t>(styler.StyleAt(i))]) i--;
+ lengthDoc += startPos - i, startPos = i;
+ }
+
+ Sci_PositionU startSeg = startPos, endSeg = startPos + lengthDoc;
+ int style = 0;
+ l_getlexerfield(L, "lex")
+ if (lua_isfunction(L, -1)) {
+ l_getlexerobj(L);
+ lua_pushlstring(L, buffer->BufferPointer() + startPos, lengthDoc);
+ lua_pushinteger(L, styler.StyleAt(startPos));
+ if (lua_pcall(L, 3, 1, 0) != LUA_OK) l_error(L);
+ // Style the text from the token table returned.
+ if (lua_istable(L, -1)) {
+ int len = lua_rawlen(L, -1);
+ if (len > 0) {
+ styler.StartAt(startPos);
+ styler.StartSegment(startPos);
+ l_getlexerfield(L, "_TOKENSTYLES");
+ // Loop through token-position pairs.
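+ // The table is a flat list {token_1, pos_1, token_2, pos_2, ...}, where
+ // each pos is the 1-based position just past its token's end in the
+ // string passed to `lex()` (hence the -1 adjustments below).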
+ for (int i = 1; i < len; i += 2) {
+ style = STYLE_DEFAULT;
+ lua_rawgeti(L, -2, i), lua_rawget(L, -2); // _TOKENSTYLES[token]
+ if (!lua_isnil(L, -1)) style = lua_tointeger(L, -1);
+ lua_pop(L, 1); // _TOKENSTYLES[token]
+ lua_rawgeti(L, -2, i + 1); // pos
+ unsigned int position = lua_tointeger(L, -1) - 1;
+ lua_pop(L, 1); // pos
+ if (style >= 0 && style <= STYLE_MAX)
+ styler.ColourTo(startSeg + position - 1, style);
+ else
+ l_error(L, "Bad style number");
+ if (position > endSeg) break;
+ }
+ lua_pop(L, 2); // _TOKENSTYLES and token table returned
+ styler.ColourTo(endSeg - 1, style);
+ styler.Flush();
+ }
+ } else l_error(L, "Table of tokens expected from 'lexer.lex'");
+ } else l_error(L, "'lexer.lex' function not found");
+ }
+
+ /**
+ * Folds the Scintilla document.
+ * @param startPos The position in the document to start folding at.
+ * @param lengthDoc The number of bytes in the document to fold.
+ * @param initStyle The initial style at position *startPos* in the document.
+ * @param buffer The document interface.
+ */
+ virtual void SCI_METHOD Fold(Sci_PositionU startPos, Sci_Position lengthDoc,
+ int initStyle, IDocument *buffer) {
+ if ((reinit && !Init()) || !L) return;
+ lua_pushlightuserdata(L, reinterpret_cast<void *>(&props));
+ lua_setfield(L, LUA_REGISTRYINDEX, "sci_props");
+ lua_pushlightuserdata(L, reinterpret_cast<void *>(buffer));
+ lua_setfield(L, LUA_REGISTRYINDEX, "sci_buffer");
+ LexAccessor styler(buffer);
+
+ l_getlexerfield(L, "fold");
+ if (lua_isfunction(L, -1)) {
+ l_getlexerobj(L);
+ Sci_Position currentLine = styler.GetLine(startPos);
+ lua_pushlstring(L, buffer->BufferPointer() + startPos, lengthDoc);
+ lua_pushinteger(L, startPos);
+ lua_pushinteger(L, currentLine);
+ lua_pushinteger(L, styler.LevelAt(currentLine) & SC_FOLDLEVELNUMBERMASK);
+ if (lua_pcall(L, 5, 1, 0) != LUA_OK) l_error(L);
+ // Fold the text from the fold table returned.
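+ // The table maps line numbers to fold levels; a level may include the
+ // FOLD_HEADER and FOLD_BLANK flags registered with the lexer in Init().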
+ if (lua_istable(L, -1)) {
+ lua_pushnil(L);
+ while (lua_next(L, -2)) { // line = level
+ styler.SetLevel(lua_tointeger(L, -2), lua_tointeger(L, -1));
+ lua_pop(L, 1); // level
+ }
+ lua_pop(L, 1); // fold table returned
+ } else l_error(L, "Table of folds expected from 'lexer.fold'");
+ } else l_error(L, "'lexer.fold' function not found");
+ }
+
+ /** Returning the version of the lexer is not implemented. */
+ virtual int SCI_METHOD Version() const { return 0; }
+ /** Returning property names is not implemented. */
+ virtual const char * SCI_METHOD PropertyNames() { return ""; }
+ /** Returning property types is not implemented. */
+ virtual int SCI_METHOD PropertyType(const char *name) { return 0; }
+ /** Returning property descriptions is not implemented. */
+ virtual const char * SCI_METHOD DescribeProperty(const char *name) {
+ return "";
+ }
+
+ /**
+ * Sets the *key* lexer property to *value*.
+ * If *key* starts with "style.", also sets the style for that token.
+ * @param key The string keyword.
+ * @param value The string value.
+ */
+ virtual Sci_Position SCI_METHOD PropertySet(const char *key,
+ const char *value) {
+ props.Set(key, *value ? value : " "); // ensure property is cleared
+ if (reinit)
+ Init();
+ else if (L && SS && sci && strncmp(key, "style.", 6) == 0) {
+ l_getlexerfield(L, "_TOKENSTYLES");
+ lua_pushstring(L, key + 6), lua_rawget(L, -2);
+ lua_pushstring(L, key), lL_getexpanded(L, -1), lua_replace(L, -2);
+ if (lua_isnumber(L, -2))
+ SetStyle(lua_tointeger(L, -2), lua_tostring(L, -1));
+ lua_pop(L, 3); // style, style number, _TOKENSTYLES
+ }
+ return -1; // no need to re-lex
+ }
+
+ /** Returning keyword list descriptions is not implemented. */
+ virtual const char * SCI_METHOD DescribeWordListSets() { return ""; }
+ /** Setting keyword lists is not applicable. */
+ virtual Sci_Position SCI_METHOD WordListSet(int n, const char *wl) {
+ return -1;
+ }
+
+ /**
+ * Allows for direct communication between the application and the lexer.
+ * The application uses this to set `SS`, `sci`, `L`, and lexer properties,
+ * and to retrieve style names.
+ * @param code The communication code.
+ * @param arg The argument.
+ * @return void *data
+ */
+ virtual void * SCI_METHOD PrivateCall(int code, void *arg) {
+ sptr_t lParam = reinterpret_cast<sptr_t>(arg);
+ const char *val = NULL;
+ switch(code) {
+ case SCI_GETDIRECTFUNCTION:
+ SS = reinterpret_cast<SciFnDirect>(lParam);
+ return NULL;
+ case SCI_SETDOCPOINTER:
+ sci = lParam;
+ return NULL;
+ case SCI_CHANGELEXERSTATE:
+ if (own_lua) lua_close(L);
+ L = reinterpret_cast<lua_State *>(lParam);
+ lua_getfield(L, LUA_REGISTRYINDEX, "sci_lexers");
+ if (lua_isnil(L, -1))
+ lua_newtable(L), lua_setfield(L, LUA_REGISTRYINDEX, "sci_lexers");
+ lua_pop(L, 1); // sci_lexers or nil
+ own_lua = false;
+ return NULL;
+ case SCI_SETLEXERLANGUAGE:
+ char lexer_name[50];
+ props.GetExpanded("lexer.name", lexer_name);
+ if (strcmp(lexer_name, reinterpret_cast<const char *>(arg)) != 0) {
+ reinit = true;
+ props.Set("lexer.lpeg.error", "");
+ PropertySet("lexer.name", reinterpret_cast<const char *>(arg));
+ } else if (L)
+ own_lua ? SetStyles() : Init();
+ return NULL;
+ case SCI_GETLEXERLANGUAGE:
+ if (L) {
+ l_getlexerfield(L, "_NAME");
+ if (SS && sci && multilang) {
+ int pos = SS(sci, SCI_GETCURRENTPOS, 0, 0);
+ while (pos >= 0 && !ws[SS(sci, SCI_GETSTYLEAT, pos, 0)]) pos--;
+ const char *name = NULL, *p = NULL;
+ if (pos >= 0) {
+ name = GetStyleName(SS(sci, SCI_GETSTYLEAT, pos, 0));
+ if (name) p = strstr(name, "_whitespace");
+ }
+ if (!name) name = lua_tostring(L, -1); // "lexer/lexer" fallback
+ if (!p) p = name + strlen(name); // "lexer/lexer" fallback
+ lua_pushstring(L, "/");
+ lua_pushlstring(L, name, p - name);
+ lua_concat(L, 3);
+ }
+ val = lua_tostring(L, -1);
+ lua_pop(L, 1); // lexer_name or lexer language string
+ }
+ return StringResult(lParam, val ? val : "null");
+ case SCI_GETSTATUS:
+ return StringResult(lParam, props.Get("lexer.lpeg.error"));
+ default: // style-related
+ if (code >= 0 && code <= STYLE_MAX) { // retrieve style names
+ val = GetStyleName(code);
+ return StringResult(lParam, val ? val : "Not Available");
+ } else return NULL;
+ }
+ }
+
+ /** Constructs a new instance of the lexer. */
+ static ILexer *LexerFactoryLPeg() { return new LexerLPeg(); }
+};
+
+LexerModule lmLPeg(SCLEX_LPEG, LexerLPeg::LexerFactoryLPeg, "lpeg");
+
+#else
+
+#include <stdlib.h>
+#include <assert.h>
+
+#include "ILexer.h"
+#include "Scintilla.h"
+#include "SciLexer.h"
+
+#include "WordList.h"
+#include "LexAccessor.h"
+#include "Accessor.h"
+#include "LexerModule.h"
+
+#if SCI_NAMESPACE
+using namespace Scintilla;
+#endif
+
+static void LPegLex(Sci_PositionU startPos, Sci_Position lengthDoc,
+ int initStyle, WordList *keywordlists[], Accessor &styler) {
+ return;
+}
+
+LexerModule lmLPeg(SCLEX_LPEG, LPegLex, "lpeg");
+
+#endif // LPEG_LEXER
diff --git a/lexlua/actionscript.lua b/lexlua/actionscript.lua
new file mode 100644
index 000000000..80d46a53c
--- /dev/null
+++ b/lexlua/actionscript.lua
@@ -0,0 +1,59 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Actionscript LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('actionscript')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ break continue delete do else for function if in new on return this typeof var
+ void while with NaN Infinity false null true undefined
+ -- Reserved for future use.
+ abstract case catch class const debugger default export extends final finally
+ goto implements import instanceof interface native package private Void
+ protected public dynamic static super switch synchronized throw throws
+ transient try volatile
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ Array Boolean Color Date Function Key MovieClip Math Mouse Number Object
+ Selection Sound String XML XMLNode XMLSocket
+ -- Reserved for future use.
+ boolean byte char double enum float int long short
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", true)
+local dq_str = lexer.delimited_range('"', true)
+local ml_str = '<![CDATA[' * (lexer.any - ']]>')^0 * ']]>'
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ml_str))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ S('LlUuFf')^-2))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*%&|^~.,;?()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+lex:add_fold_point(lexer.STRING, '<![CDATA[', ']]>')
+
+return lex
diff --git a/lexlua/ada.lua b/lexlua/ada.lua
new file mode 100644
index 000000000..f1db9f1fb
--- /dev/null
+++ b/lexlua/ada.lua
@@ -0,0 +1,57 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Ada LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('ada')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abort abs accept all and begin body case declare delay do else elsif end entry
+ exception exit for generic goto if in is loop mod new not null or others out
+ protected raise record rem renames requeue reverse select separate subtype
+ task terminate then type until when while xor
+ -- Preprocessor.
+ package pragma use with
+ -- Function.
+ function procedure return
+ -- Storage class.
+ abstract access aliased array at constant delta digits interface limited of
+ private range tagged synchronized
+ -- Boolean.
+ true false
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ boolean character count duration float integer long_float long_integer
+ priority short_float short_integer string
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING,
+ lexer.delimited_range('"', true, true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '--' * lexer.nonnewline^0))
+
+-- Numbers.
+local hex_num = '0' * S('xX') * (lexer.xdigit + '_')^1
+local integer = lexer.digit^1 * ('_' * lexer.digit^1)^0
+local float = integer^1 * ('.' * integer^0)^-1 * S('eE') * S('+-')^-1 * integer
+lex:add_rule('number', token(lexer.NUMBER, hex_num +
+ S('+-')^-1 * (float + integer) *
+ S('LlUuFf')^-3))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S(':;=<>&+-*/.()')))
+
+return lex
diff --git a/lexlua/ansi_c.lua b/lexlua/ansi_c.lua
new file mode 100644
index 000000000..4f961e67b
--- /dev/null
+++ b/lexlua/ansi_c.lua
@@ -0,0 +1,90 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- C LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('ansi_c')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ auto break case const continue default do else extern for goto if inline
+ register restrict return sizeof static switch typedef volatile while
+ -- C11.
+ _Alignas _Alignof _Atomic _Generic _Noreturn _Static_assert _Thread_local
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ char double enum float int long short signed struct union unsigned void
+ _Bool _Complex _Imaginary
+ -- Stdlib types.
+ ptrdiff_t size_t max_align_t wchar_t intptr_t uintptr_t intmax_t uintmax_t
+]] + P('u')^-1 * 'int' * (P('_least') + '_fast')^-1 * R('09')^1 * '_t'))
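+-- The trailing pattern above also matches the sized stdint types, e.g.
+-- int8_t, uint32_t, int_least16_t, and uint_fast64_t.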
+
+-- Constants.
+lex:add_rule('constants', token(lexer.CONSTANT, word_match[[
+ NULL
+ -- Preprocessor.
+ __DATE__ __FILE__ __LINE__ __TIME__ __func__
+ -- errno.h.
+ E2BIG EACCES EADDRINUSE EADDRNOTAVAIL EAFNOSUPPORT EAGAIN EALREADY EBADF
+ EBADMSG EBUSY ECANCELED ECHILD ECONNABORTED ECONNREFUSED ECONNRESET EDEADLK
+ EDESTADDRREQ EDOM EDQUOT EEXIST EFAULT EFBIG EHOSTUNREACH EIDRM EILSEQ
+ EINPROGRESS EINTR EINVAL EIO EISCONN EISDIR ELOOP EMFILE EMLINK EMSGSIZE
+ EMULTIHOP ENAMETOOLONG ENETDOWN ENETRESET ENETUNREACH ENFILE ENOBUFS ENODATA
+ ENODEV ENOENT ENOEXEC ENOLCK ENOLINK ENOMEM ENOMSG ENOPROTOOPT ENOSPC ENOSR
+ ENOSTR ENOSYS ENOTCONN ENOTDIR ENOTEMPTY ENOTRECOVERABLE ENOTSOCK ENOTSUP
+ ENOTTY ENXIO EOPNOTSUPP EOVERFLOW EOWNERDEAD EPERM EPIPE EPROTO
+ EPROTONOSUPPORT EPROTOTYPE ERANGE EROFS ESPIPE ESRCH ESTALE ETIME ETIMEDOUT
+ ETXTBSY EWOULDBLOCK EXDEV
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local sq_str = P('L')^-1 * lexer.delimited_range("'", true)
+local dq_str = P('L')^-1 * lexer.delimited_range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1 +
+ lexer.starts_line('#if') * S(' \t')^0 * '0' *
+ lexer.space *
+ (lexer.any - lexer.starts_line('#endif'))^0 *
+ (lexer.starts_line('#endif'))^-1
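+-- Note: the second alternative above styles "#if 0 ... #endif" blocks as
+-- comments.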
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Preprocessor.
+local preproc_word = word_match[[
+ define elif else endif if ifdef ifndef line pragma undef
+]]
+lex:add_rule('preprocessor',
+ #lexer.starts_line('#') *
+ (token(lexer.PREPROCESSOR, '#' * S('\t ')^0 * preproc_word) +
+ token(lexer.PREPROCESSOR, '#' * S('\t ')^0 * 'include') *
+ (token(lexer.WHITESPACE, S('\t ')^1) *
+ token(lexer.STRING,
+ lexer.delimited_range('<>', true, true)))^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>~!=^&|?~:;,.()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.PREPROCESSOR, '#if', '#endif')
+lex:add_fold_point(lexer.PREPROCESSOR, '#ifdef', '#endif')
+lex:add_fold_point(lexer.PREPROCESSOR, '#ifndef', '#endif')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/antlr.lua b/lexlua/antlr.lua
new file mode 100644
index 000000000..7d1fa3b50
--- /dev/null
+++ b/lexlua/antlr.lua
@@ -0,0 +1,57 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- ANTLR LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('antlr')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abstract break case catch continue default do else extends final finally for
+ if implements instanceof native new private protected public return static
+ switch synchronized throw throws transient try volatile
+ while package import header options tokens strictfp
+ false null super this true
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ boolean byte char class double float int interface long short void
+]]))
+
+-- Functions.
+lex:add_rule('func', token(lexer.FUNCTION, 'assert'))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Actions.
+lex:add_rule('action', token(lexer.OPERATOR, P('{')) *
+ token('action', (1 - P('}'))^0) *
+ token(lexer.OPERATOR, P('}'))^-1)
+lex:add_style('action', lexer.STYLE_NOTHING)
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true)))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('$@:;|.=+*?~!^>-()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, ':', ';')
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/apdl.lua b/lexlua/apdl.lua
new file mode 100644
index 000000000..5a1b2e164
--- /dev/null
+++ b/lexlua/apdl.lua
@@ -0,0 +1,74 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- APDL LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('apdl', {case_insensitive_fold_points = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ *abbr *abb *afun *afu *ask *cfclos *cfc *cfopen *cfo *cfwrite *cfw *create
+ *cre *cycle *cyc *del *dim *do *elseif *else *enddo *endif *end *eval *eva
+ *exit *exi *get *go *if *list *lis *mfouri *mfo *mfun *mfu *mooney *moo *moper
+ *mop *msg *repeat *rep *set *status *sta *tread *tre *ulib *uli *use *vabs
+ *vab *vcol *vco *vcum *vcu *vedit *ved *vfact *vfa *vfill *vfi *vfun *vfu
+ *vget *vge *vitrp *vit *vlen *vle *vmask *vma *voper *vop *vplot *vpl *vput
+ *vpu *vread *vre *vscfun *vsc *vstat *vst *vwrite *vwr
+ /anfile /anf /angle /ang /annot /ann /anum /anu /assign /ass /auto /aut /aux15
+ /aux2 /aux /axlab /axl /batch /bat /clabel /cla /clear /cle /clog /clo /cmap
+ /cma /color /col /com /config /contour /con /copy /cop /cplane /cpl /ctype
+ /cty /cval /cva /delete /del /devdisp /device /dev /dist /dis /dscale /dsc
+ /dv3d /dv3 /edge /edg /efacet /efa /eof /erase /era /eshape /esh /exit /exi
+ /expand /exp /facet /fac /fdele /fde /filname /fil /focus /foc /format /for
+ /ftype /fty /gcmd /gcm /gcolumn /gco /gfile /gfi /gformat /gfo /gline /gli
+ /gmarker /gma /golist /gol /gopr /gop /go /graphics /gra /gresume /gre /grid
+ /gri /gropt /gro /grtyp /grt /gsave /gsa /gst /gthk /gth /gtype /gty /header
+ /hea /input /inp /larc /lar /light /lig /line /lin /lspec /lsp /lsymbol /lsy
+ /menu /men /mplib /mpl /mrep /mre /mstart /mst /nerr /ner /noerase /noe
+ /nolist /nol /nopr /nop /normal /nor /number /num /opt /output /out /page /pag
+ /pbc /pbf /pcircle /pci /pcopy /pco /plopts /plo /pmacro /pma /pmeth /pme
+ /pmore /pmo /pnum /pnu /polygon /pol /post26 /post1 /pos /prep7 /pre /psearch
+ /pse /psf /pspec /psp /pstatus /pst /psymb /psy /pwedge /pwe /quit /qui /ratio
+ /rat /rename /ren /replot /rep /reset /res /rgb /runst /run /seclib /sec /seg
+ /shade /sha /showdisp /show /sho /shrink /shr /solu /sol /sscale /ssc /status
+ /sta /stitle /sti /syp /sys /title /tit /tlabel /tla /triad /tri /trlcy /trl
+ /tspec /tsp /type /typ /ucmd /ucm /uis /ui /units /uni /user /use /vcone /vco
+ /view /vie /vscale /vsc /vup /wait /wai /window /win /xrange /xra /yrange /yra
+ /zoom /zoo
+]], true))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING,
+ lexer.delimited_range("'", true, true)))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION,
+ lexer.delimited_range('%', true, true)))
+
+-- Labels.
+lex:add_rule('label', token(lexer.LABEL, lexer.starts_line(':') * lexer.word))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '!' * lexer.nonnewline^0))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/$=,;()')))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, '*if', '*endif')
+lex:add_fold_point(lexer.KEYWORD, '*do', '*enddo')
+lex:add_fold_point(lexer.KEYWORD, '*dowhile', '*enddo')
+lex:add_fold_point(lexer.COMMENT, '!', lexer.fold_line_comments('!'))
+
+return lex
diff --git a/lexlua/apl.lua b/lexlua/apl.lua
new file mode 100644
index 000000000..5275ebffa
--- /dev/null
+++ b/lexlua/apl.lua
@@ -0,0 +1,57 @@
+-- Copyright 2015-2018 David B. Lamkins <david@lamkins.net>. See License.txt.
+-- APL LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('apl')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, (P('⍝') + '#') *
+ lexer.nonnewline^0))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", false, true)
+local dq_str = lexer.delimited_range('"')
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Numbers.
+local dig = R('09')
+local rad = P('.')
+local exp = S('eE')
+local img = S('jJ')
+local sgn = P('¯')^-1
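+-- ¯ is APL's high minus (negative sign); j/J joins the real and imaginary
+-- parts of a complex literal in the number rule below.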
+local float = sgn * (dig^0 * rad * dig^1 + dig^1 * rad * dig^0 + dig^1) *
+ (exp * sgn * dig^1)^-1
+lex:add_rule('number', token(lexer.NUMBER, float * img * float + float))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, P('⍞') + 'χ' + '⍺' + '⍶' + '⍵' +
+ '⍹' + '⎕' * R('AZ', 'az')^0))
+
+-- Names.
+local n1l = R('AZ', 'az')
+local n1b = P('_') + '∆' + '⍙'
+local n2l = n1l + R('09')
+local n2b = n1b + '¯'
+local n1 = n1l + n1b
+local n2 = n2l + n2b
+local name = n1 * n2^0
+
+-- Labels.
+lex:add_rule('label', token(lexer.LABEL, name * ':'))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, name))
+
+-- Special.
+lex:add_rule('special', token(lexer.TYPE, S('{}[]();') + '←' + '→' + '◊'))
+
+-- Nabla.
+lex:add_rule('nabla', token(lexer.PREPROCESSOR, P('∇') + '⍫'))
+
+return lex
diff --git a/lexlua/applescript.lua b/lexlua/applescript.lua
new file mode 100644
index 000000000..60a67383b
--- /dev/null
+++ b/lexlua/applescript.lua
@@ -0,0 +1,69 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Applescript LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('applescript')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ script property prop end copy to set global local on to of in given with
+ without return continue tell if then else repeat times while until from exit
+ try error considering ignoring timeout transaction my get put into is
+ -- References.
+ each some every whose where id index first second third fourth fifth sixth
+ seventh eighth ninth tenth last front back st nd rd th middle named through
+ thru before after beginning the
+ -- Commands.
+ close copy count delete duplicate exists launch make move open print quit
+ reopen run save saving
+ -- Operators.
+ div mod and not or as contains equal equals isn't
+]], true))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ case diacriticals expansion hyphens punctuation
+ -- Predefined variables.
+ it me version pi result space tab anything
+ -- Text styles.
+ bold condensed expanded hidden italic outline plain shadow strikethrough
+ subscript superscript underline
+ -- Save options.
+ ask no yes
+ -- Booleans.
+ false true
+ -- Date and time.
+ weekday monday mon tuesday tue wednesday wed thursday thu friday fri saturday
+ sat sunday sun month january jan february feb march mar april apr may june jun
+ july jul august aug september sep october oct november nov december dec
+ minutes hours days weeks
+]], true))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') *
+ lexer.alnum^0))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"', true)))
+
+-- Comments.
+local line_comment = '--' * lexer.nonnewline^0
+local block_comment = '(*' * (lexer.any - '*)')^0 * P('*)')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-^*/&<>=:,(){}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.COMMENT, '(*', '*)')
+
+return lex
diff --git a/lexlua/asm.lua b/lexlua/asm.lua
new file mode 100644
index 000000000..b2e137146
--- /dev/null
+++ b/lexlua/asm.lua
@@ -0,0 +1,363 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- NASM Assembly LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('asm')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ -- Preprocessor macros.
+ struc endstruc istruc at iend align alignb sectalign .nolist
+ -- Preprocessor Packages.
+ --altreg smartalign fp ifunc
+ -- Directives.
+ absolute bits class common cpu default export extern float global group
+ import osabi overlay private public __SECT__ section segment stack use16 use32
+ use64
+ -- Section Names.
+ .bss .comment .data .lbss .ldata .lrodata .rdata .rodata .tbss .tdata .text
+ -- Section Qualifiers.
+ alloc bss code exec data noalloc nobits noexec nowrite progbits rdata tls
+ write
+ -- Operators.
+ abs rel seg wrt strict
+ __utf16__ __utf16be__ __utf16le__ __utf32__ __utf32be__ __utf32le__
+]]))
+
+-- Instructions.
+-- awk '{print $1}'|uniq|tr '[:upper:]' '[:lower:]'|
+-- lua -e "for l in io.lines() do print(\"'\"..l..\"',\") end"|fmt -w 78
+lex:add_rule('instruction', token('instruction', word_match[[
+ -- Special Instructions.
+ db dd do dq dt dw dy resb resd reso resq rest resw resy
+ -- Conventional Instructions.
+ aaa aad aam aas adc add and arpl bb0_reset bb1_reset bound bsf bsr bswap bt
+ btc btr bts call cbw cdq cdqe clc cld cli clts cmc cmp cmpsb cmpsd cmpsq cmpsw
+ cmpxchg cmpxchg486 cmpxchg8b cmpxchg16b cpuid cpu_read cpu_write cqo cwd cwde
+ daa das dec div dmint emms enter equ f2xm1 fabs fadd faddp fbld fbstp fchs
+ fclex fcmovb fcmovbe fcmove fcmovnb fcmovnbe fcmovne fcmovnu fcmovu fcom fcomi
+ fcomip fcomp fcompp fcos fdecstp fdisi fdiv fdivp fdivr fdivrp femms feni
+ ffree ffreep fiadd ficom ficomp fidiv fidivr fild fimul fincstp finit fist
+ fistp fisttp fisub fisubr fld fld1 fldcw fldenv fldl2e fldl2t fldlg2 fldln2
+ fldpi fldz fmul fmulp fnclex fndisi fneni fninit fnop fnsave fnstcw fnstenv
+ fnstsw fpatan fprem fprem1 fptan frndint frstor fsave fscale fsetpm fsin
+ fsincos fsqrt fst fstcw fstenv fstp fstsw fsub fsubp fsubr fsubrp ftst fucom
+ fucomi fucomip fucomp fucompp fxam fxch fxtract fyl2x fyl2xp1 hlt ibts icebp
+ idiv imul in inc incbin insb insd insw int int01 int1 int03 int3 into invd
+ invpcid invlpg invlpga iret iretd iretq iretw jcxz jecxz jrcxz jmp jmpe lahf
+ lar lds lea leave les lfence lfs lgdt lgs lidt lldt lmsw loadall loadall286
+ lodsb lodsd lodsq lodsw loop loope loopne loopnz loopz lsl lss ltr mfence
+ monitor mov movd movq movsb movsd movsq movsw movsx movsxd movsx movzx mul
+ mwait neg nop not or out outsb outsd outsw packssdw packsswb packuswb paddb
+ paddd paddsb paddsiw paddsw paddusb paddusw paddw pand pandn pause paveb
+ pavgusb pcmpeqb pcmpeqd pcmpeqw pcmpgtb pcmpgtd pcmpgtw pdistib pf2id pfacc
+ pfadd pfcmpeq pfcmpge pfcmpgt pfmax pfmin pfmul pfrcp pfrcpit1 pfrcpit2
+ pfrsqit1 pfrsqrt pfsub pfsubr pi2fd pmachriw pmaddwd pmagw pmulhriw pmulhrwa
+ pmulhrwc pmulhw pmullw pmvgezb pmvlzb pmvnzb pmvzb pop popa popad popaw popf
+ popfd popfq popfw por prefetch prefetchw pslld psllq psllw psrad psraw psrld
+ psrlq psrlw psubb psubd psubsb psubsiw psubsw psubusb psubusw psubw punpckhbw
+ punpckhdq punpckhwd punpcklbw punpckldq punpcklwd push pusha pushad pushaw
+ pushf pushfd pushfq pushfw pxor rcl rcr rdshr rdmsr rdpmc rdtsc rdtscp ret
+ retf retn rol ror rdm rsdc rsldt rsm rsts sahf sal salc sar sbb scasb scasd
+ scasq scasw sfence sgdt shl shld shr shrd sidt sldt skinit smi smint smintold
+ smsw stc std sti stosb stosd stosq stosw str sub svdc svldt svts swapgs
+ syscall sysenter sysexit sysret test ud0 ud1 ud2b ud2 ud2a umov verr verw
+ fwait wbinvd wrshr wrmsr xadd xbts xchg xlatb xlat xor cmovcc jcc setcc
+ -- Katmai Streaming SIMD instructions (SSE -- a.k.a. KNI XMM MMX2).
+ addps addss andnps andps cmpeqps cmpeqss cmpleps cmpless cmpltps cmpltss
+ cmpneqps cmpneqss cmpnleps cmpnless cmpnltps cmpnltss cmpordps cmpordss
+ cmpunordps cmpunordss cmpps cmpss comiss cvtpi2ps cvtps2pi cvtsi2ss cvtss2si
+ cvttps2pi cvttss2si divps divss ldmxcsr maxps maxss minps minss movaps movhps
+ movlhps movlps movhlps movmskps movntps movss movups mulps mulss orps rcpps
+ rcpss rsqrtps rsqrtss shufps sqrtps sqrtss stmxcsr subps subss ucomiss
+ unpckhps unpcklps xorps
+ -- Introduced in Deschutes but necessary for SSE support.
+ fxrstor fxrstor64 fxsave fxsave64
+ -- XSAVE group (AVX and extended state).
+ xgetbv xsetbv xsave xsave64 xsaveopt xsaveopt64 xrstor xrstor64
+ -- Generic memory operations.
+ prefetchnta prefetcht0 prefetcht1 prefetcht2 sfence
+ -- New MMX instructions introduced in Katmai.
+ maskmovq movntq pavgb pavgw pextrw pinsrw pmaxsw pmaxub pminsw pminub pmovmskb
+ pmulhuw psadbw pshufw
+ -- AMD Enhanced 3DNow! (Athlon) instructions.
+ pf2iw pfnacc pfpnacc pi2fw pswapd
+ -- Willamette SSE2 Cacheability Instructions.
+ maskmovdqu clflush movntdq movnti movntpd lfence mfence
+ -- Willamette MMX instructions (SSE2 SIMD Integer Instructions).
+ movd movdqa movdqu movdq2q movq movq2dq packsswb packssdw packuswb paddb paddw
+ paddd paddq paddsb paddsw paddusb paddusw pand pandn pavgb pavgw pcmpeqb
+ pcmpeqw pcmpeqd pcmpgtb pcmpgtw pcmpgtd pextrw pinsrw pmaddwd pmaxsw pmaxub
+ pminsw pminub pmovmskb pmulhuw pmulhw pmullw pmuludq por psadbw pshufd pshufhw
+ pshuflw pslldq psllw pslld psllq psraw psrad psrldq psrlw psrld psrlq psubb
+ psubw psubd psubq psubsb psubsw psubusb psubusw punpckhbw punpckhwd punpckhdq
+ punpckhqdq punpcklbw punpcklwd punpckldq punpcklqdq pxor
+ -- Willamette Streaming SIMD instructions (SSE2).
+ addpd addsd andnpd andpd cmpeqpd cmpeqsd cmplepd cmplesd cmpltpd cmpltsd
+ cmpneqpd cmpneqsd cmpnlepd cmpnlesd cmpnltpd cmpnltsd cmpordpd cmpordsd
+ cmpunordpd cmpunordsd cmppd cmpsd comisd cvtdq2pd cvtdq2ps cvtpd2dq cvtpd2pi
+ cvtpd2ps cvtpi2pd cvtps2dq cvtps2pd cvtsd2si cvtsd2ss cvtsi2sd cvtss2sd
+ cvttpd2pi cvttpd2dq cvttps2dq cvttsd2si divpd divsd maxpd maxsd minpd minsd
+ movapd movhpd movlpd movmskpd movsd movupd mulpd mulsd orpd shufpd sqrtpd
+ sqrtsd subpd subsd ucomisd unpckhpd unpcklpd xorpd
+ -- Prescott New Instructions (SSE3).
+ addsubpd addsubps haddpd haddps hsubpd hsubps lddqu movddup movshdup movsldup
+ -- VMX/SVM Instructions.
+ clgi stgi vmcall vmclear vmfunc vmlaunch vmload vmmcall vmptrld vmptrst vmread
+ vmresume vmrun vmsave vmwrite vmxoff vmxon
+ -- Extended Page Tables VMX instructions.
+ invept invvpid
+ -- Tejas New Instructions (SSSE3).
+ pabsb pabsw pabsd palignr phaddw phaddd phaddsw phsubw phsubd phsubsw
+ pmaddubsw pmulhrsw pshufb psignb psignw psignd
+ -- AMD SSE4A.
+ extrq insertq movntsd movntss
+ -- New instructions in Barcelona.
+ lzcnt
+ -- Penryn New Instructions (SSE4.1).
+ blendpd blendps blendvpd blendvps dppd dpps extractps insertps movntdqa
+ mpsadbw packusdw pblendvb pblendw pcmpeqq pextrb pextrd pextrq pextrw
+ phminposuw pinsrb pinsrd pinsrq pmaxsb pmaxsd pmaxud pmaxuw pminsb pminsd
+ pminud pminuw pmovsxbw pmovsxbd pmovsxbq pmovsxwd pmovsxwq pmovsxdq pmovzxbw
+ pmovzxbd pmovzxbq pmovzxwd pmovzxwq pmovzxdq pmuldq pmulld ptest roundpd
+ roundps roundsd roundss
+ -- Nehalem New Instructions (SSE4.2).
+ crc32 pcmpestri pcmpestrm pcmpistri pcmpistrm pcmpgtq popcnt
+ -- Intel SMX.
+ getsec
+ -- Geode (Cyrix) 3DNow! additions.
+ pfrcpv pfrsqrtv
+ -- Intel new instructions in ???.
+ movbe
+ -- Intel AES instructions.
+ aesenc aesenclast aesdec aesdeclast aesimc aeskeygenassist
+ -- Intel AVX AES instructions.
+ vaesenc vaesenclast vaesdec vaesdeclast vaesimc vaeskeygenassist
+ -- Intel AVX instructions.
+ vaddpd vaddps vaddsd vaddss vaddsubpd vaddsubps vandpd vandps vandnpd vandnps
+ vblendpd vblendps vblendvpd vblendvps vbroadcastss vbroadcastsd vbroadcastf128
+ vcmpeq_ospd vcmpeqpd vcmplt_ospd vcmpltpd vcmple_ospd vcmplepd vcmpunord_qpd
+ vcmpunordpd vcmpneq_uqpd vcmpneqpd vcmpnlt_uspd vcmpnltpd vcmpnle_uspd
+ vcmpnlepd vcmpord_qpd vcmpordpd vcmpeq_uqpd vcmpnge_uspd vcmpngepd
+ vcmpngt_uspd vcmpngtpd vcmpfalse_oqpd vcmpfalsepd vcmpneq_oqpd vcmpge_ospd
+ vcmpgepd vcmpgt_ospd vcmpgtpd vcmptrue_uqpd vcmptruepd vcmpeq_ospd vcmplt_oqpd
+ vcmple_oqpd vcmpunord_spd vcmpneq_uspd vcmpnlt_uqpd vcmpnle_uqpd vcmpord_spd
+ vcmpeq_uspd vcmpnge_uqpd vcmpngt_uqpd vcmpfalse_ospd vcmpneq_ospd vcmpge_oqpd
+ vcmpgt_oqpd vcmptrue_uspd vcmppd vcmpeq_osps vcmpeqps vcmplt_osps vcmpltps
+ vcmple_osps vcmpleps vcmpunord_qps vcmpunordps vcmpneq_uqps vcmpneqps
+ vcmpnlt_usps vcmpnltps vcmpnle_usps vcmpnleps vcmpord_qps vcmpordps
+ vcmpeq_uqps vcmpnge_usps vcmpngeps vcmpngt_usps vcmpngtps vcmpfalse_oqps
+ vcmpfalseps vcmpneq_oqps vcmpge_osps vcmpgeps vcmpgt_osps vcmpgtps
+ vcmptrue_uqps vcmptrueps vcmpeq_osps vcmplt_oqps vcmple_oqps vcmpunord_sps
+ vcmpneq_usps vcmpnlt_uqps vcmpnle_uqps vcmpord_sps vcmpeq_usps vcmpnge_uqps
+ vcmpngt_uqps vcmpfalse_osps vcmpneq_osps vcmpge_oqps vcmpgt_oqps vcmptrue_usps
+ vcmpps vcmpeq_ossd vcmpeqsd vcmplt_ossd vcmpltsd vcmple_ossd vcmplesd
+ vcmpunord_qsd vcmpunordsd vcmpneq_uqsd vcmpneqsd vcmpnlt_ussd vcmpnltsd
+ vcmpnle_ussd vcmpnlesd vcmpord_qsd vcmpordsd vcmpeq_uqsd vcmpnge_ussd
+ vcmpngesd vcmpngt_ussd vcmpngtsd vcmpfalse_oqsd vcmpfalsesd vcmpneq_oqsd
+ vcmpge_ossd vcmpgesd vcmpgt_ossd vcmpgtsd vcmptrue_uqsd vcmptruesd vcmpeq_ossd
+ vcmplt_oqsd vcmple_oqsd vcmpunord_ssd vcmpneq_ussd vcmpnlt_uqsd vcmpnle_uqsd
+ vcmpord_ssd vcmpeq_ussd vcmpnge_uqsd vcmpngt_uqsd vcmpfalse_ossd vcmpneq_ossd
+ vcmpge_oqsd vcmpgt_oqsd vcmptrue_ussd vcmpsd vcmpeq_osss vcmpeqss vcmplt_osss
+ vcmpltss vcmple_osss vcmpless vcmpunord_qss vcmpunordss vcmpneq_uqss vcmpneqss
+ vcmpnlt_usss vcmpnltss vcmpnle_usss vcmpnless vcmpord_qss vcmpordss
+ vcmpeq_uqss vcmpnge_usss vcmpngess vcmpngt_usss vcmpngtss vcmpfalse_oqss
+ vcmpfalsess vcmpneq_oqss vcmpge_osss vcmpgess vcmpgt_osss vcmpgtss
+ vcmptrue_uqss vcmptruess vcmpeq_osss vcmplt_oqss vcmple_oqss vcmpunord_sss
+ vcmpneq_usss vcmpnlt_uqss vcmpnle_uqss vcmpord_sss vcmpeq_usss vcmpnge_uqss
+ vcmpngt_uqss vcmpfalse_osss vcmpneq_osss vcmpge_oqss vcmpgt_oqss vcmptrue_usss
+ vcmpss vcomisd vcomiss vcvtdq2pd vcvtdq2ps vcvtpd2dq vcvtpd2ps vcvtps2dq
+ vcvtps2pd vcvtsd2si vcvtsd2ss vcvtsi2sd vcvtsi2ss vcvtss2sd vcvtss2si
+ vcvttpd2dq vcvttps2dq vcvttsd2si vcvttss2si vdivpd vdivps vdivsd vdivss vdppd
+ vdpps vextractf128 vextractps vhaddpd vhaddps vhsubpd vhsubps vinsertf128
+ vinsertps vlddqu vldqqu vlddqu vldmxcsr vmaskmovdqu vmaskmovps vmaskmovpd
+ vmaxpd vmaxps vmaxsd vmaxss vminpd vminps vminsd vminss vmovapd vmovaps vmovd
+ vmovq vmovddup vmovdqa vmovqqa vmovdqa vmovdqu vmovqqu vmovdqu vmovhlps
+ vmovhpd vmovhps vmovlhps vmovlpd vmovlps vmovmskpd vmovmskps vmovntdq vmovntqq
+ vmovntdq vmovntdqa vmovntpd vmovntps vmovsd vmovshdup vmovsldup vmovss vmovupd
+ vmovups vmpsadbw vmulpd vmulps vmulsd vmulss vorpd vorps vpabsb vpabsw vpabsd
+ vpacksswb vpackssdw vpackuswb vpackusdw vpaddb vpaddw vpaddd vpaddq vpaddsb
+ vpaddsw vpaddusb vpaddusw vpalignr vpand vpandn vpavgb vpavgw vpblendvb
+ vpblendw vpcmpestri vpcmpestrm vpcmpistri vpcmpistrm vpcmpeqb vpcmpeqw
+ vpcmpeqd vpcmpeqq vpcmpgtb vpcmpgtw vpcmpgtd vpcmpgtq vpermilpd vpermilps
+ vperm2f128 vpextrb vpextrw vpextrd vpextrq vphaddw vphaddd vphaddsw
+ vphminposuw vphsubw vphsubd vphsubsw vpinsrb vpinsrw vpinsrd vpinsrq vpmaddwd
+ vpmaddubsw vpmaxsb vpmaxsw vpmaxsd vpmaxub vpmaxuw vpmaxud vpminsb vpminsw
+ vpminsd vpminub vpminuw vpminud vpmovmskb vpmovsxbw vpmovsxbd vpmovsxbq
+ vpmovsxwd vpmovsxwq vpmovsxdq vpmovzxbw vpmovzxbd vpmovzxbq vpmovzxwd
+ vpmovzxwq vpmovzxdq vpmulhuw vpmulhrsw vpmulhw vpmullw vpmulld vpmuludq
+ vpmuldq vpor vpsadbw vpshufb vpshufd vpshufhw vpshuflw vpsignb vpsignw vpsignd
+ vpslldq vpsrldq vpsllw vpslld vpsllq vpsraw vpsrad vpsrlw vpsrld vpsrlq vptest
+ vpsubb vpsubw vpsubd vpsubq vpsubsb vpsubsw vpsubusb vpsubusw vpunpckhbw
+ vpunpckhwd vpunpckhdq vpunpckhqdq vpunpcklbw vpunpcklwd vpunpckldq vpunpcklqdq
+ vpxor vrcpps vrcpss vrsqrtps vrsqrtss vroundpd vroundps vroundsd vroundss
+ vshufpd vshufps vsqrtpd vsqrtps vsqrtsd vsqrtss vstmxcsr vsubpd vsubps vsubsd
+ vsubss vtestps vtestpd vucomisd vucomiss vunpckhpd vunpckhps vunpcklpd
+ vunpcklps vxorpd vxorps vzeroall vzeroupper
+ -- Intel Carry-Less Multiplication instructions (CLMUL).
+ pclmullqlqdq pclmulhqlqdq pclmullqhqdq pclmulhqhqdq pclmulqdq
+ -- Intel AVX Carry-Less Multiplication instructions (CLMUL).
+ vpclmullqlqdq vpclmulhqlqdq vpclmullqhqdq vpclmulhqhqdq vpclmulqdq
+ -- Intel Fused Multiply-Add instructions (FMA).
+ vfmadd132ps vfmadd132pd vfmadd312ps vfmadd312pd vfmadd213ps vfmadd213pd
+ vfmadd123ps vfmadd123pd vfmadd231ps vfmadd231pd vfmadd321ps vfmadd321pd
+ vfmaddsub132ps vfmaddsub132pd vfmaddsub312ps vfmaddsub312pd vfmaddsub213ps
+ vfmaddsub213pd vfmaddsub123ps vfmaddsub123pd vfmaddsub231ps vfmaddsub231pd
+ vfmaddsub321ps vfmaddsub321pd vfmsub132ps vfmsub132pd vfmsub312ps vfmsub312pd
+ vfmsub213ps vfmsub213pd vfmsub123ps vfmsub123pd vfmsub231ps vfmsub231pd
+ vfmsub321ps vfmsub321pd vfmsubadd132ps vfmsubadd132pd vfmsubadd312ps
+ vfmsubadd312pd vfmsubadd213ps vfmsubadd213pd vfmsubadd123ps vfmsubadd123pd
+ vfmsubadd231ps vfmsubadd231pd vfmsubadd321ps vfmsubadd321pd vfnmadd132ps
+ vfnmadd132pd vfnmadd312ps vfnmadd312pd vfnmadd213ps vfnmadd213pd vfnmadd123ps
+ vfnmadd123pd vfnmadd231ps vfnmadd231pd vfnmadd321ps vfnmadd321pd vfnmsub132ps
+ vfnmsub132pd vfnmsub312ps vfnmsub312pd vfnmsub213ps vfnmsub213pd vfnmsub123ps
+ vfnmsub123pd vfnmsub231ps vfnmsub231pd vfnmsub321ps vfnmsub321pd vfmadd132ss
+ vfmadd132sd vfmadd312ss vfmadd312sd vfmadd213ss vfmadd213sd vfmadd123ss
+ vfmadd123sd vfmadd231ss vfmadd231sd vfmadd321ss vfmadd321sd vfmsub132ss
+ vfmsub132sd vfmsub312ss vfmsub312sd vfmsub213ss vfmsub213sd vfmsub123ss
+ vfmsub123sd vfmsub231ss vfmsub231sd vfmsub321ss vfmsub321sd vfnmadd132ss
+ vfnmadd132sd vfnmadd312ss vfnmadd312sd vfnmadd213ss vfnmadd213sd vfnmadd123ss
+ vfnmadd123sd vfnmadd231ss vfnmadd231sd vfnmadd321ss vfnmadd321sd vfnmsub132ss
+ vfnmsub132sd vfnmsub312ss vfnmsub312sd vfnmsub213ss vfnmsub213sd vfnmsub123ss
+ vfnmsub123sd vfnmsub231ss vfnmsub231sd vfnmsub321ss vfnmsub321sd
+ -- Intel post-32 nm processor instructions.
+ rdfsbase rdgsbase rdrand wrfsbase wrgsbase vcvtph2ps vcvtps2ph adcx adox
+ rdseed clac stac
+ -- VIA (Centaur) security instructions.
+ xstore xcryptecb xcryptcbc xcryptctr xcryptcfb xcryptofb montmul xsha1 xsha256
+ -- AMD Lightweight Profiling (LWP) instructions.
+ llwpcb slwpcb lwpval lwpins
+ -- AMD XOP and FMA4 instructions (SSE5).
+ vfmaddpd vfmaddps vfmaddsd vfmaddss vfmaddsubpd vfmaddsubps vfmsubaddpd
+ vfmsubaddps vfmsubpd vfmsubps vfmsubsd vfmsubss vfnmaddpd vfnmaddps vfnmaddsd
+ vfnmaddss vfnmsubpd vfnmsubps vfnmsubsd vfnmsubss vfrczpd vfrczps vfrczsd
+ vfrczss vpcmov vpcomb vpcomd vpcomq vpcomub vpcomud vpcomuq vpcomuw vpcomw
+ vphaddbd vphaddbq vphaddbw vphadddq vphaddubd vphaddubq vphaddubw vphaddudq
+ vphadduwd vphadduwq vphaddwd vphaddwq vphsubbw vphsubdq vphsubwd vpmacsdd
+ vpmacsdqh vpmacsdql vpmacssdd vpmacssdqh vpmacssdql vpmacsswd vpmacssww
+ vpmacswd vpmacsww vpmadcsswd vpmadcswd vpperm vprotb vprotd vprotq vprotw
+ vpshab vpshad vpshaq vpshaw vpshlb vpshld vpshlq vpshlw
+ -- Intel AVX2 instructions.
+ vmpsadbw vpabsb vpabsw vpabsd vpacksswb vpackssdw vpackusdw vpackuswb vpaddb
+ vpaddw vpaddd vpaddq vpaddsb vpaddsw vpaddusb vpaddusw vpalignr vpand vpandn
+ vpavgb vpavgw vpblendvb vpblendw vpcmpeqb vpcmpeqw vpcmpeqd vpcmpeqq vpcmpgtb
+ vpcmpgtw vpcmpgtd vpcmpgtq vphaddw vphaddd vphaddsw vphsubw vphsubd vphsubsw
+ vpmaddubsw vpmaddwd vpmaxsb vpmaxsw vpmaxsd vpmaxub vpmaxuw vpmaxud vpminsb
+ vpminsw vpminsd vpminub vpminuw vpminud vpmovmskb vpmovsxbw vpmovsxbd
+ vpmovsxbq vpmovsxwd vpmovsxwq vpmovsxdq vpmovzxbw vpmovzxbd vpmovzxbq
+ vpmovzxwd vpmovzxwq vpmovzxdq vpmuldq vpmulhrsw vpmulhuw vpmulhw vpmullw
+ vpmulld vpmuludq vpor vpsadbw vpshufb vpshufd vpshufhw vpshuflw vpsignb
+ vpsignw vpsignd vpslldq vpsllw vpslld vpsllq vpsraw vpsrad vpsrldq vpsrlw
+ vpsrld vpsrlq vpsubb vpsubw vpsubd vpsubq vpsubsb vpsubsw vpsubusb vpsubusw
+ vpunpckhbw vpunpckhwd vpunpckhdq vpunpckhqdq vpunpcklbw vpunpcklwd vpunpckldq
+ vpunpcklqdq vpxor vmovntdqa vbroadcastss vbroadcastsd vbroadcasti128 vpblendd
+ vpbroadcastb vpbroadcastw vpbroadcastd vpbroadcastq vpermd vpermpd vpermps
+ vpermq vperm2i128 vextracti128 vinserti128 vpmaskmovd vpmaskmovq vpmaskmovd
+ vpmaskmovq vpsllvd vpsllvq vpsllvd vpsllvq vpsravd vpsrlvd vpsrlvq vpsrlvd
+ vpsrlvq vgatherdpd vgatherqpd vgatherdpd vgatherqpd vgatherdps vgatherqps
+ vgatherdps vgatherqps vpgatherdd vpgatherqd vpgatherdd vpgatherqd vpgatherdq
+ vpgatherqq vpgatherdq vpgatherqq
+ -- Transactional Synchronization Extensions (TSX).
+ xabort xbegin xend xtest
+ -- Intel BMI1 and BMI2 instructions, AMD TBM instructions.
+ andn bextr blci blcic blsi blsic blcfill blsfill blcmsk blsmsk blsr blcs bzhi
+ mulx pdep pext rorx sarx shlx shrx tzcnt tzmsk t1mskc
+ -- Systematic names for the hinting nop instructions.
+ hint_nop0 hint_nop1 hint_nop2 hint_nop3 hint_nop4 hint_nop5 hint_nop6
+ hint_nop7 hint_nop8 hint_nop9 hint_nop10 hint_nop11 hint_nop12 hint_nop13
+ hint_nop14 hint_nop15 hint_nop16 hint_nop17 hint_nop18 hint_nop19 hint_nop20
+ hint_nop21 hint_nop22 hint_nop23 hint_nop24 hint_nop25 hint_nop26 hint_nop27
+ hint_nop28 hint_nop29 hint_nop30 hint_nop31 hint_nop32 hint_nop33 hint_nop34
+ hint_nop35 hint_nop36 hint_nop37 hint_nop38 hint_nop39 hint_nop40 hint_nop41
+ hint_nop42 hint_nop43 hint_nop44 hint_nop45 hint_nop46 hint_nop47 hint_nop48
+ hint_nop49 hint_nop50 hint_nop51 hint_nop52 hint_nop53 hint_nop54 hint_nop55
+ hint_nop56 hint_nop57 hint_nop58 hint_nop59 hint_nop60 hint_nop61 hint_nop62
+ hint_nop63
+]]))
+lex:add_style('instruction', lexer.STYLE_FUNCTION)
+
+-- Registers.
+lex:add_rule('register', token('register', word_match[[
+ -- 32-bit registers.
+ ah al ax bh bl bp bx ch cl cx dh di dl dx eax ebx ebx ecx edi edx esi esp fs
+ mm0 mm1 mm2 mm3 mm4 mm5 mm6 mm7 si st0 st1 st2 st3 st4 st5 st6 st7 xmm0 xmm1
+ xmm2 xmm3 xmm4 xmm5 xmm6 xmm7 ymm0 ymm1 ymm2 ymm3 ymm4 ymm5 ymm6 ymm7
+ -- 64-bit registers.
+ bpl dil gs r8 r8b r8w r9 r9b r9w r10 r10b r10w r11 r11b r11w r12 r12b r12w r13
+ r13b r13w r14 r14b r14w r15 r15b r15w rax rbp rbx rcx rdi rdx rsi rsp sil xmm8
+ xmm9 xmm10 xmm11 xmm12 xmm13 xmm14 xmm15 ymm8 ymm9 ymm10 ymm11 ymm12 ymm13
+ ymm14 ymm15
+]]))
+lex:add_style('register', lexer.STYLE_CONSTANT)
+
+-- Types.
+local sizes = word_match[[
+ byte word dword qword tword oword yword
+ a16 a32 a64 o16 o32 o64 -- instructions
+]]
+local wrt_types = '..' * word_match[[
+ start gotpc gotoff gottpoff got plt sym tlsie
+]]
+lex:add_rule('type', token(lexer.TYPE, sizes + wrt_types))
+
+local word = (lexer.alpha + S('$._?')) * (lexer.alnum + S('$._?#@~'))^0
+
+-- Constants.
+local constants = word_match[[
+ __float128h__ __float128l__ __float16__ __float32__ __float64__ __float8__
+ __float80e__ __float80m__ __Infinity__ __NaN__ __QNaN__ __SNaN__
+]]
+lex:add_rule('constant', token(lexer.CONSTANT, constants +
+ '$' * P('$')^-1 * -word))
+
+-- Labels.
+lex:add_rule('label', token(lexer.LABEL, word * ':'))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, ';' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float +
+ lexer.integer * S('hqb')^-1))
+
+-- Preprocessor.
+local preproc_word = word_match[[
+ arg assign clear define defstr deftok depend elif elifctx elifdef elifempty
+ elifenv elifid elifidn elifidni elifmacro elifn elifnctx elifndef elifnempty
+ elifnenv elifnid elifnidn elifnidni elifnmacro elifnnum elifnstr elifntoken
+ elifnum elifstr eliftoken else endif endmacro endrep endwhile error exitmacro
+ exitrep exitwhile fatal final idefine idefstr ideftok if ifctx ifdef ifempty
+ ifenv ifid ifidn ifidni ifmacro ifn ifnctx ifndef ifnempty ifnenv ifnid ifnidn
+ ifnidni ifnmacro ifnnum ifnstr ifntoken ifnum ifstr iftoken imacro include
+ ixdefine line local macro pathsearch pop push rep repl rmacro rotate stacksize
+ strcat strlen substr undef unmacro use warning while xdefine
+]]
+local preproc_symbol = '??' + S('!$+?') + '%' * -lexer.space + R('09')^1
+lex:add_rule('preproc', token(lexer.PREPROCESSOR, '%' * (preproc_word +
+ preproc_symbol)))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|~:,()[]')))
+
+-- Fold points.
+lex:add_fold_point(lexer.PREPROCESSOR, '%if', '%endif')
+lex:add_fold_point(lexer.PREPROCESSOR, '%macro', '%endmacro')
+lex:add_fold_point(lexer.PREPROCESSOR, '%rep', '%endrep')
+lex:add_fold_point(lexer.PREPROCESSOR, '%while', '%endwhile')
+lex:add_fold_point(lexer.KEYWORD, 'struc', 'endstruc')
+lex:add_fold_point(lexer.COMMENT, ';', lexer.fold_line_comments(';'))
+
+return lex
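
The instruction, register, and type rules above are all built from lexer.word_match, which compiles a whitespace-separated word list into a single LPeg pattern that accepts only complete words from that list. A minimal standalone sketch of that behaviour, assuming LPeg is installed and the in-tree lexer.lua module is on package.path:

    local lpeg = require('lpeg')
    local lexer = require('lexer')

    -- A tiny word list, compiled the same way the rules above compile theirs.
    local regs = lexer.word_match[[ eax rax xmm0 ]]

    print(lpeg.match(regs, 'rax') ~= nil)   --> true
    print(lpeg.match(regs, 'raxx') ~= nil)  --> false (not a listed word)
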
diff --git a/lexlua/asp.lua b/lexlua/asp.lua
new file mode 100644
index 000000000..ef955c262
--- /dev/null
+++ b/lexlua/asp.lua
@@ -0,0 +1,34 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- ASP LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local html = lexer.load('html')
+local lex = lexer.new('asp', {inherit = html}) -- proxy for HTML
+
+-- Embedded VB.
+local vb = lexer.load('vb')
+local vb_start_rule = token('asp_tag', '<%' * P('=')^-1)
+local vb_end_rule = token('asp_tag', '%>')
+lex:embed(vb, vb_start_rule, vb_end_rule)
+lex:add_style('asp_tag', lexer.STYLE_EMBEDDED)
+
+-- Embedded VBScript.
+local vbs = lexer.load('vbscript')
+local script_element = word_match('script', true)
+local vbs_start_rule = #(P('<') * script_element * (P(function(input, index)
+ if input:find('^%s+language%s*=%s*(["\'])vbscript%1', index) or
+ input:find('^%s+type%s*=%s*(["\'])text/vbscript%1', index) then
+ return index
+ end
+end) + '>')) * html.embed_start_tag -- <script language="vbscript">
+local vbs_end_rule = #('</' * script_element * lexer.space^0 * '>') *
+ html.embed_end_tag -- </script>
+lex:embed(vbs, vbs_start_rule, vbs_end_rule)
+
+-- Fold points.
+lex:add_fold_point('asp_tag', '<%', '%>')
+
+return lex
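
The VBScript start rule above recognizes language="vbscript" and type="text/vbscript" attributes with a Lua pattern back-reference (%1), so the attribute value must be closed by the same quote character that opened it. A standalone sketch of just that check (is_vbscript_attr is a hypothetical helper, not part of the lexer):

    -- Back-reference %1 re-matches whatever the (["']) group captured.
    local function is_vbscript_attr(s)
      return s:find('^%s+language%s*=%s*(["\'])vbscript%1') ~= nil
    end

    print(is_vbscript_attr(' language="vbscript"'))   --> true
    print(is_vbscript_attr(" language='vbscript\""))  --> false (mismatched quotes)
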
diff --git a/lexlua/autoit.lua b/lexlua/autoit.lua
new file mode 100644
index 000000000..68121bb69
--- /dev/null
+++ b/lexlua/autoit.lua
@@ -0,0 +1,132 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- AutoIt LPeg lexer.
+-- Contributed by Jeff Stone.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('autoit')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ False True And Or Not ContinueCase ContinueLoop Default Dim Global Local Const
+ Do Until Enum Exit ExitLoop For To Step Next In Func Return EndFunc If Then
+ ElseIf Else EndIf Null ReDim Select Case EndSelect Static Switch EndSwitch
+ Volatile While WEnd With EndWith
+]], true)))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match([[
+ Abs ACos AdlibRegister AdlibUnRegister Asc AscW ASin Assign ATan
+ AutoItSetOption AutoItWinGetTitle AutoItWinSetTitle Beep Binary BinaryLen
+ BinaryMid BinaryToString BitAND BitNOT BitOR BitRotate BitShift BitXOR
+ BlockInput Break Call CDTray Ceiling Chr ChrW ClipGet ClipPut ConsoleRead
+ ConsoleWrite ConsoleWriteError ControlClick ControlCommand ControlDisable
+ ControlEnable ControlFocus ControlGetFocus ControlGetHandle ControlGetPos
+ ControlGetText ControlHide ControlListView ControlMove ControlSend
+ ControlSetText ControlShow ControlTreeView Cos Dec DirCopy DirCreate
+ DirGetSize DirMove DirRemove DllCall DllCallAddress DllCallbackFree
+ DllCallbackGetPtr DllCallbackRegister DllClose DllOpen DllStructCreate
+ DllStructGetData DllStructGetPtr DllStructGetSize DllStructSetData
+ DriveGetDrive DriveGetFileSystem DriveGetLabel DriveGetSerial DriveGetType
+ DriveMapAdd DriveMapDel DriveMapGet DriveSetLabel DriveSpaceFree
+ DriveSpaceTotal DriveStatus EnvGet EnvSet EnvUpdate Eval Execute Exp
+ FileChangeDir FileClose FileCopy FileCreateNTFSLink FileCreateShortcut
+ FileDelete FileExists FileFindFirstFile FileFindNextFile FileFlush
+ FileGetAttrib FileGetEncoding FileGetLongName FileGetPos FileGetShortcut
+ FileGetShortName FileGetSize FileGetTime FileGetVersion FileInstall FileMove
+ FileOpen FileOpenDialog FileRead FileReadLine FileReadToArray FileRecycle
+ FileRecycleEmpty FileSaveDialog FileSelectFolder FileSetAttrib FileSetEnd
+ FileSetPos FileSetTime FileWrite FileWriteLine Floor FtpSetProxy FuncName
+ GUICreate GUICtrlCreateAvi GUICtrlCreateButton GUICtrlCreateCheckbox
+ GUICtrlCreateCombo GUICtrlCreateContextMenu GUICtrlCreateDate
+ GUICtrlCreateDummy GUICtrlCreateEdit GUICtrlCreateGraphic GUICtrlCreateGroup
+ GUICtrlCreateIcon GUICtrlCreateInput GUICtrlCreateLabel GUICtrlCreateList
+ GUICtrlCreateListView GUICtrlCreateListViewItem GUICtrlCreateMenu
+ GUICtrlCreateMenuItem GUICtrlCreateMonthCal GUICtrlCreateObj GUICtrlCreatePic
+ GUICtrlCreateProgress GUICtrlCreateRadio GUICtrlCreateSlider GUICtrlCreateTab
+ GUICtrlCreateTabItem GUICtrlCreateTreeView GUICtrlCreateTreeViewItem
+ GUICtrlCreateUpdown GUICtrlDelete GUICtrlGetHandle GUICtrlGetState GUICtrlRead
+ GUICtrlRecvMsg GUICtrlRegisterListViewSort GUICtrlSendMsg GUICtrlSendToDummy
+ GUICtrlSetBkColor GUICtrlSetColor GUICtrlSetCursor GUICtrlSetData
+ GUICtrlSetDefBkColor GUICtrlSetDefColor GUICtrlSetFont GUICtrlSetGraphic
+ GUICtrlSetImage GUICtrlSetLimit GUICtrlSetOnEvent GUICtrlSetPos
+ GUICtrlSetResizing GUICtrlSetState GUICtrlSetStyle GUICtrlSetTip GUIDelete
+ GUIGetCursorInfo GUIGetMsg GUIGetStyle GUIRegisterMsg GUISetAccelerators
+ GUISetBkColor GUISetCoord GUISetCursor GUISetFont GUISetHelp GUISetIcon
+ GUISetOnEvent GUISetState GUISetStyle GUIStartGroup GUISwitch Hex HotKeySet
+ HttpSetProxy HttpSetUserAgent HWnd InetClose InetGet InetGetInfo InetGetSize
+ InetRead IniDelete IniRead IniReadSection IniReadSectionNames IniRenameSection
+ IniWrite IniWriteSection InputBox Int IsAdmin IsArray IsBinary IsBool
+ IsDeclared IsDllStruct IsFloat IsFunc IsHWnd IsInt IsKeyword IsNumber IsObj
+ IsPtr IsString Log MemGetStats Mod MouseClick MouseClickDrag MouseDown
+ MouseGetCursor MouseGetPos MouseMove MouseUp MouseWheel MsgBox Number
+ ObjCreate ObjCreateInterface ObjEvent ObjGet ObjName OnAutoItExitRegister
+ OnAutoItExitUnRegister Ping PixelChecksum PixelGetColor PixelSearch
+ ProcessClose ProcessExists ProcessGetStats ProcessList ProcessSetPriority
+ ProcessWait ProcessWaitClose ProgressOff ProgressOn ProgressSet Ptr Random
+ RegDelete RegEnumKey RegEnumVal RegRead RegWrite Round Run RunAs RunAsWait
+ RunWait Send SendKeepActive SetError SetExtended ShellExecute ShellExecuteWait
+ Shutdown Sin Sleep SoundPlay SoundSetWaveVolume SplashImageOn SplashOff
+ SplashTextOn Sqrt SRandom StatusbarGetText StderrRead StdinWrite StdioClose
+ StdoutRead String StringAddCR StringCompare StringFormat StringFromASCIIArray
+ StringInStr StringIsAlNum StringIsAlpha StringIsASCII StringIsDigit
+ StringIsFloat StringIsInt StringIsLower StringIsSpace StringIsUpper
+ StringIsXDigit StringLeft StringLen StringLower StringMid StringRegExp
+ StringRegExpReplace StringReplace StringReverse StringRight StringSplit
+ StringStripCR StringStripWS StringToASCIIArray StringToBinary StringTrimLeft
+ StringTrimRight StringUpper Tan TCPAccept TCPCloseSocket TCPConnect
+ TCPListen TCPNameToIP TCPRecv TCPSend TCPShutdown TCPStartup TimerDiff
+ TimerInit ToolTip TrayCreateItem TrayCreateMenu TrayGetMsg TrayItemDelete
+ TrayItemGetHandle TrayItemGetState TrayItemGetText TrayItemSetOnEvent
+ TrayItemSetState TrayItemSetText TraySetClick TraySetIcon TraySetOnEvent
+ TraySetPauseIcon TraySetState TraySetToolTip TrayTip UBound UDPBind
+ UDPCloseSocket UDPOpen UDPRecv UDPSend UDPShutdown UDPStartup VarGetType
+ WinActivate WinActive WinClose WinExists WinFlash WinGetCaretPos
+ WinGetClassList WinGetClientSize WinGetHandle WinGetPos WinGetProcess
+ WinGetState WinGetText WinGetTitle WinKill WinList WinMenuSelectItem
+ WinMinimizeAll WinMinimizeAllUndo WinMove WinSetOnTop WinSetState WinSetTitle
+ WinSetTrans WinWait WinWaitActive WinWaitClose WinWaitNotActive
+]], true)))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = ';' * lexer.nonnewline_esc^0
+local block_comment1 = '#comments-start' * (lexer.any - '#comments-end')^0 *
+ P('#comments-end')^-1
+local block_comment2 = '#cs' * (lexer.any - '#ce')^0 * P('#ce')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment1 +
+ block_comment2))
+
+-- Preprocessor.
+lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, '#' * word_match([[
+ include-once include pragma forceref RequireAdmin NoTrayIcon
+ OnAutoItStartRegister
+]], true)))
+
+-- Strings.
+local dq_str = lexer.delimited_range('"', true, true)
+local sq_str = lexer.delimited_range("'", true, true)
+local inc = lexer.delimited_range('<>', true, true, true)
+lex:add_rule('string', token(lexer.STRING, dq_str + sq_str + inc))
+
+-- Macros.
+lex:add_rule('macro', token('macro', '@' * (lexer.alnum + '_')^1))
+lex:add_style('macro', lexer.STYLE_PREPROCESSOR)
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, '$' * (lexer.alnum + '_')^1))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-^*/&<>=?:()[]')))
+
+return lex
diff --git a/lexlua/awk.lua b/lexlua/awk.lua
new file mode 100644
index 000000000..a3f69fd83
--- /dev/null
+++ b/lexlua/awk.lua
@@ -0,0 +1,297 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- AWK LPeg lexer.
+-- Modified by Wolfgang Seeberg 2012, 2013.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('awk')
+
+local LEFTBRACKET = '['
+local RIGHTBRACKET = ']'
+local SLASH = '/'
+local BACKSLASH = '\\'
+local CARET = '^'
+local CR = '\r'
+local LF = '\n'
+local CRLF = CR .. LF
+local DQUOTE = '"'
+local DELIMITER_MATCHES = {['('] = ')', ['['] = ']'}
+local COMPANION = {['('] = '[', ['['] = '('}
+local CC = {
+ alnum = 1, alpha = 1, blank = 1, cntrl = 1, digit = 1, graph = 1, lower = 1,
+ print = 1, punct = 1, space = 1, upper = 1, xdigit = 1
+}
+local LastRegexEnd = 0
+local BackslashAtCommentEnd = 0
+local KW_BEFORE_RX = {
+ case = 1, ['do'] = 1, ['else'] = 1, exit = 1, print = 1, printf = 1,
+ ['return'] = 1
+}
+
+local function findKeyword(input, e)
+ local i = e
+ while i > 0 and input:find("^[%l]", i) do i = i - 1 end
+ local w = input:sub(i + 1, e)
+ if i == 0 then
+ return KW_BEFORE_RX[w] == 1
+ elseif input:find("^[%u%d_]", i) then
+ return false
+ else
+ return KW_BEFORE_RX[w] == 1
+ end
+end
+
+local function isRegex(input, i)
+ while i >= 1 and input:find('^[ \t]', i) do i = i - 1 end
+ if i < 1 then return true end
+ if input:find("^[-!%%&(*+,:;<=>?[^{|}~\f]", i) or findKeyword(input, i) then
+ return true
+ elseif input:sub(i, i) == SLASH then
+ return i ~= LastRegexEnd -- deals with /xx/ / /yy/.
+ elseif input:find('^[]%w)."]', i) then
+ return false
+ elseif input:sub(i, i) == LF then
+ if i == 1 then return true end
+ i = i - 1
+ if input:sub(i, i) == CR then
+ if i == 1 then return true end
+ i = i - 1
+ end
+ elseif input:sub(i, i) == CR then
+ if i == 1 then return true end
+ i = i - 1
+ else
+ return false
+ end
+ if input:sub(i, i) == BACKSLASH and i ~= BackslashAtCommentEnd then
+ return isRegex(input, i - 1)
+ else
+ return true
+ end
+end
+
+local function eatCharacterClass(input, s, e)
+ local i = s
+ while i <= e do
+ if input:find('^[\r\n]', i) then
+ return false
+ elseif input:sub(i, i + 1) == ':]' then
+ local str = input:sub(s, i - 1)
+ return CC[str] == 1 and i + 1
+ end
+ i = i + 1
+ end
+ return false
+end
+
+local function eatBrackets(input, i, e)
+ if input:sub(i, i) == CARET then i = i + 1 end
+ if input:sub(i, i) == RIGHTBRACKET then i = i + 1 end
+ while i <= e do
+ if input:find('^[\r\n]', i) then
+ return false
+ elseif input:sub(i, i) == RIGHTBRACKET then
+ return i
+ elseif input:sub(i, i + 1) == '[:' then
+ i = eatCharacterClass(input, i + 2, e)
+ if not i then return false end
+ elseif input:sub(i, i) == BACKSLASH then
+ i = i + 1
+ if input:sub(i, i + 1) == CRLF then i = i + 1 end
+ end
+ i = i + 1
+ end
+ return false
+end
+
+local function eatRegex(input, i)
+ local e = #input
+ while i <= e do
+ if input:find('^[\r\n]', i) then
+ return false
+ elseif input:sub(i, i) == SLASH then
+ LastRegexEnd = i
+ return i
+ elseif input:sub(i, i) == LEFTBRACKET then
+ i = eatBrackets(input, i + 1, e)
+ if not i then return false end
+ elseif input:sub(i, i) == BACKSLASH then
+ i = i + 1
+ if input:sub(i, i + 1) == CRLF then i = i + 1 end
+ end
+ i = i + 1
+ end
+ return false
+end
+
+local ScanRegexResult
+local function scanGawkRegex(input, index)
+ if isRegex(input, index - 2) then
+ local i = eatRegex(input, index)
+ if not i then
+ ScanRegexResult = false
+ return false
+ end
+ local rx = input:sub(index - 1, i)
+ for bs in rx:gmatch("[^\\](\\+)[BSsWwy<>`']") do
+ -- /\S/ is special, but /\\S/ is not.
+ if #bs % 2 == 1 then return i + 1 end
+ end
+ ScanRegexResult = i + 1
+ else
+ ScanRegexResult = false
+ end
+ return false
+end
+-- scanRegex() is only called immediately after scanGawkRegex() and reuses its
+-- result.
+local function scanRegex()
+ return ScanRegexResult
+end
+
+local function scanString(input, index)
+ local i = index
+ local e = #input
+ while i <= e do
+ if input:find('^[\r\n]', i) then
+ return false
+ elseif input:sub(i, i) == DQUOTE then
+ return i + 1
+ elseif input:sub(i, i) == BACKSLASH then
+ i = i + 1
+ -- lexer.delimited_range() doesn't handle CRLF.
+ if input:sub(i, i + 1) == CRLF then i = i + 1 end
+ end
+ i = i + 1
+ end
+ return false
+end
+
+-- purpose: prevent isRegex() from entering a comment line that ends with a
+-- backslash.
+local function scanComment(input, index)
+ local _, i = input:find('[^\r\n]*', index)
+ if input:sub(i, i) == BACKSLASH then BackslashAtCommentEnd = i end
+ return i + 1
+end
+
+local function scanFieldDelimiters(input, index)
+ local i = index
+ local e = #input
+ local left = input:sub(i - 1, i - 1)
+ local count = 1
+ local right = DELIMITER_MATCHES[left]
+ local left2 = COMPANION[left]
+ local count2 = 0
+ local right2 = DELIMITER_MATCHES[left2]
+ while i <= e do
+ if input:find('^[#\r\n]', i) then
+ return false
+ elseif input:sub(i, i) == right then
+ count = count - 1
+ if count == 0 then return count2 == 0 and i + 1 end
+ elseif input:sub(i, i) == left then
+ count = count + 1
+ elseif input:sub(i, i) == right2 then
+ count2 = count2 - 1
+ if count2 < 0 then return false end
+ elseif input:sub(i, i) == left2 then
+ count2 = count2 + 1
+ elseif input:sub(i, i) == DQUOTE then
+ i = scanString(input, i + 1)
+ if not i then return false end
+ i = i - 1
+ elseif input:sub(i, i) == SLASH then
+ if isRegex(input, i - 1) then
+ i = eatRegex(input, i + 1)
+ if not i then return false end
+ end
+ elseif input:sub(i, i) == BACKSLASH then
+ if input:sub(i + 1, i + 2) == CRLF then
+ i = i + 2
+ elseif input:find('^[\r\n]', i + 1) then
+ i = i + 1
+ end
+ end
+ i = i + 1
+ end
+ return false
+end
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * P(scanComment)))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, DQUOTE * P(scanString)))
+
+-- No leading sign, since a leading '+' or '-' may be a binary operator instead.
+local float = ((lexer.digit^1 * ('.' * lexer.digit^0)^-1) +
+ ('.' * lexer.digit^1)) *
+ (S('eE') * S('+-')^-1 * lexer.digit^1)^-1
+
+-- Fields. E.g. $1, $a, $(x), $a(x), $a[x], $"1", $$a, etc.
+lex:add_rule('field',
+ token('field', P('$') * S('$+-')^0 *
+ (float +
+ lexer.word^0 * '(' * P(scanFieldDelimiters) +
+ lexer.word^1 * ('[' * P(scanFieldDelimiters))^-1 +
+ '"' * P(scanString) +
+ '/' * P(eatRegex) * '/')))
+lex:add_style('field', lexer.STYLE_LABEL)
+
+-- Regular expressions.
+-- Slash-delimited regular expressions are recognized when preceded by most
+-- operators or by the keywords 'print' and 'case', possibly on a preceding
+-- line. They may contain unescaped slashes, as well as bracket characters
+-- inside bracket expressions. Escape sequences such as '\S' and '\s' have
+-- special meaning in gawk, so tokens containing them are styled differently.
+lex:add_rule('gawkRegex', token('gawkRegex', SLASH * P(scanGawkRegex)))
+lex:add_style('gawkRegex', lexer.STYLE_PREPROCESSOR..',underlined')
+lex:add_rule('regex', token(lexer.REGEX, SLASH * P(scanRegex)))
+
+-- Operators.
+lex:add_rule('gawkOperator', token('gawkOperator', P("|&") + "@" + "**=" +
+ "**"))
+lex:add_style('gawkOperator', lexer.STYLE_OPERATOR..',underlined')
+lex:add_rule('operator', token(lexer.OPERATOR, S('!%&()*+,-/:;<=>?[\\]^{|}~')))
+
+-- Numbers.
+lex:add_rule('gawkNumber', token('gawkNumber', lexer.hex_num + lexer.oct_num))
+lex:add_style('gawkNumber', lexer.STYLE_NUMBER..',underlined')
+lex:add_rule('number', token(lexer.NUMBER, float))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ BEGIN END atan2 break close continue cos delete do else exit exp fflush for
+ function getline gsub if in index int length log match next nextfile print
+ printf rand return sin split sprintf sqrt srand sub substr system tolower
+ toupper while
+]]))
+
+lex:add_rule('builtInVariable', token('builtInVariable', word_match[[
+ ARGC ARGV CONVFMT ENVIRON FILENAME FNR FS NF NR OFMT OFS ORS RLENGTH RS RSTART
+ SUBSEP
+]]))
+lex:add_style('builtInVariable', lexer.STYLE_CONSTANT)
+
+lex:add_rule('gawkBuiltInVariable', token('gawkBuiltInVariable', word_match[[
+ ARGIND BINMODE ERRNO FIELDWIDTHS FPAT FUNCTAB IGNORECASE LINT PREC PROCINFO
+ ROUNDMODE RT SYMTAB TEXTDOMAIN
+]]))
+lex:add_style('gawkBuiltInVariable', lexer.STYLE_CONSTANT..',underlined')
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, lexer.word * #P('(')))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
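
The scanners above (scanString, scanComment, scanFieldDelimiters, and so on) are hooked into LPeg with P(function). LPeg calls such a function with the whole subject and the current position; returning a position just past the consumed text makes the match succeed, while returning false or nil makes it fail. A minimal standalone sketch of the mechanism (scan_to_semicolon is a hypothetical scanner, not part of this lexer):

    local lpeg = require('lpeg')
    local P = lpeg.P

    -- Hypothetical scanner: consume everything up to and including the next ';'.
    local function scan_to_semicolon(input, index)
      local stop = input:find(';', index, true)
      return stop and stop + 1 or false
    end

    local patt = P('say ') * P(scan_to_semicolon)
    print(lpeg.match(patt, 'say hello; rest'))  --> 11 (just past the ';')
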
diff --git a/lexlua/bash.lua b/lexlua/bash.lua
new file mode 100644
index 000000000..bd738e47b
--- /dev/null
+++ b/lexlua/bash.lua
@@ -0,0 +1,60 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Shell LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('bash')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ if then elif else fi case in esac while for do done continue local return
+ select
+ -- Operators.
+ -a -b -c -d -e -f -g -h -k -p -r -s -t -u -w -x -O -G -L -S -N -nt -ot -ef -o
+ -z -n -eq -ne -lt -le -gt -ge
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", false, true)
+local dq_str = lexer.delimited_range('"')
+local ex_str = lexer.delimited_range('`')
+local heredoc = '<<' * P(function(input, index)
+ local s, e, _, delimiter =
+ input:find('%-?(["\']?)([%a_][%w_]*)%1[\n\r\f;]+', index)
+ if s == index and delimiter then
+ local _, e = input:find('[\n\r\f]+'..delimiter, e)
+ return e and e + 1 or #input + 1
+ end
+end)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ex_str + heredoc))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE,
+ '$' * (S('!#?*@$') + lexer.digit^1 + lexer.word +
+ lexer.delimited_range('{}', true, true))))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^&|~.,:;?()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'if', 'fi')
+lex:add_fold_point(lexer.KEYWORD, 'case', 'esac')
+lex:add_fold_point(lexer.KEYWORD, 'do', 'done')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
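
The heredoc rule above uses the same match-time function technique: after '<<' it locates the delimiter word and then consumes everything through the line that repeats it. A simplified standalone sketch (it ignores the optional leading '-' and the quoting that the real rule accepts):

    local lpeg = require('lpeg')
    local P = lpeg.P

    local heredoc = '<<' * P(function(input, index)
      local s, e, delim = input:find('^([%a_][%w_]*)[\n\r]', index)
      if s then
        local _, stop = input:find('[\n\r]'..delim, e)
        return stop and stop + 1 or #input + 1
      end
    end)

    print(lpeg.match(heredoc, '<<EOF\necho hi\nEOF\n'))  --> 18 (just past the closing EOF)
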
diff --git a/lexlua/batch.lua b/lexlua/batch.lua
new file mode 100644
index 000000000..c81ec08d1
--- /dev/null
+++ b/lexlua/batch.lua
@@ -0,0 +1,52 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Batch LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('batch', {case_insensitive_fold_points = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ cd chdir md mkdir cls for if echo echo. move copy ren del set call exit
+ setlocal shift endlocal pause defined exist errorlevel else in do NUL AUX PRN
+ not goto pushd popd
+]], true)))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match([[
+ APPEND ATTRIB CHKDSK CHOICE DEBUG DEFRAG DELTREE DISKCOMP DISKCOPY DOSKEY
+ DRVSPACE EMM386 EXPAND FASTOPEN FC FDISK FIND FORMAT GRAPHICS KEYB LABEL
+ LOADFIX MEM MODE MORE MOVE MSCDEX NLSFUNC POWER PRINT RD REPLACE RESTORE
+ SETVER SHARE SORT SUBST SYS TREE UNDELETE UNFORMAT VSAFE XCOPY
+]], true)))
+
+-- Comments.
+local rem = (P('REM') + 'rem') * lexer.space
+lex:add_rule('comment', token(lexer.COMMENT, (rem + '::') * lexer.nonnewline^0))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"', true)))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE,
+ '%' * (lexer.digit + '%' * lexer.alpha) +
+ lexer.delimited_range('%', true, true)))
+
+-- Labels.
+lex:add_rule('label', token(lexer.LABEL, ':' * lexer.word))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+|&!<>=')))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'setlocal', 'endlocal')
+
+return lex
diff --git a/lexlua/bibtex.lua b/lexlua/bibtex.lua
new file mode 100644
index 000000000..bdca1a807
--- /dev/null
+++ b/lexlua/bibtex.lua
@@ -0,0 +1,45 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Bibtex LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('bibtex')
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+
+-- Fields.
+lex:add_rule('field', token('field', word_match[[
+ author title journal year volume number pages month note key publisher editor
+ series address edition howpublished booktitle organization chapter school
+ institution type isbn issn affiliation issue keyword url
+]]))
+lex:add_style('field', lexer.STYLE_CONSTANT)
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING,
+ lexer.delimited_range('"') +
+ lexer.delimited_range('{}', false, true, true)))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S(',=')))
+
+-- Embedded in Latex.
+local latex = lexer.load('latex')
+
+-- Embedded Bibtex.
+local entry = token('entry', P('@') * word_match([[
+ book article booklet conference inbook incollection inproceedings manual
+ mastersthesis lambda misc phdthesis proceedings techreport unpublished
+]], true))
+lex:add_style('entry', lexer.STYLE_PREPROCESSOR)
+local bibtex_start_rule = entry * ws^0 * token(lexer.OPERATOR, P('{'))
+local bibtex_end_rule = token(lexer.OPERATOR, P('}'))
+latex:embed(lex, bibtex_start_rule, bibtex_end_rule)
+
+return lex
diff --git a/lexlua/boo.lua b/lexlua/boo.lua
new file mode 100644
index 000000000..d1b1d6849
--- /dev/null
+++ b/lexlua/boo.lua
@@ -0,0 +1,64 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Boo LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('boo')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ and break cast continue elif else ensure except for given goto if in isa is
+ not or otherwise pass raise ref try unless when while
+ -- Definitions.
+ abstract callable class constructor def destructor do enum event final get
+ interface internal of override partial private protected public return set
+ static struct transient virtual yield
+ -- Namespaces.
+ as from import namespace
+ -- Other.
+ self super null true false
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ bool byte char date decimal double duck float int long object operator regex
+ sbyte short single string timespan uint ulong ushort
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ array assert checked enumerate __eval__ filter getter len lock map matrix max
+ min normalArrayIndexing print property range rawArrayIndexing required
+ __switch__ typeof unchecked using yieldAll zip
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", true)
+local dq_str = lexer.delimited_range('"', true)
+local triple_dq_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+local regex_str = #P('/') * lexer.last_char_includes('!%^&*([{-=+|:;,?<>~') *
+ lexer.delimited_range('/', true)
+lex:add_rule('string', token(lexer.STRING, triple_dq_str + sq_str + dq_str) +
+ token(lexer.REGEX, regex_str))
+
+-- Comments.
+local line_comment = '#' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+-- Try the two-character 'ms' suffix before the single-character ones.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+                                           (P('ms') + S('msdhsfFlL'))^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~`')))
+
+return lex
diff --git a/lexlua/caml.lua b/lexlua/caml.lua
new file mode 100644
index 000000000..10e308af0
--- /dev/null
+++ b/lexlua/caml.lua
@@ -0,0 +1,62 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- OCaml LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('caml')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ and as asr begin class closed constraint do done downto else end exception
+ external failwith false flush for fun function functor if in include incr
+ inherit land let load los lsl lsr lxor match method mod module mutable new not
+ of open option or parser private raise rec ref regexp sig stderr stdin stdout
+ struct then to true try type val virtual when while with
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ bool char float int string unit
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ abs abs_float acos asin atan atan2 at_exit bool_of_string ceil char_of_int
+ classify_float close_in close_in_noerr close_out close_out_noerr compare cos
+ cosh decr epsilon_float exit exp failwith float float_of_int float_of_string
+ floor flush flush_all format_of_string frexp fst ignore in_channel_length incr
+ infinity input input_binary_int input_byte input_char input_line input_value
+ int_of_char int_of_float int_of_string invalid_arg ldexp log log10 max
+ max_float max_int min min_float min_int mod modf mod_float nan open_in
+ open_in_bin open_in_gen open_out open_out_bin open_out_gen out_channel_length
+ output output_binary_int output_byte output_char output_string output_value
+ pos_in pos_out pred prerr_char prerr_endline prerr_float prerr_int
+ prerr_newline prerr_string print_char print_endline print_float print_int
+ print_newline print_string raise read_float read_int read_line really_input
+ seek_in seek_out set_binary_mode_in set_binary_mode_out sin sinh snd sqrt
+ stderr stdin stdout string_of_bool string_of_float string_of_format
+ string_of_int succ tan tanh truncate
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, lexer.nested_pair('(*', '*)')))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+-*/.,:;~!#%^&|?[](){}')))
+
+return lex
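
OCaml comments nest, which is why the comment rule above uses lexer.nested_pair rather than a flat delimited range. A sketch of that pattern in isolation, assuming the in-tree lexer module is on package.path:

    local lpeg = require('lpeg')
    local lexer = require('lexer')

    local comment = lexer.nested_pair('(*', '*)')
    -- Consumes through the outer '*)', not the inner one.
    print(lpeg.match(comment, '(* outer (* inner *) still outer *) code'))
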
diff --git a/lexlua/chuck.lua b/lexlua/chuck.lua
new file mode 100644
index 000000000..988732ab4
--- /dev/null
+++ b/lexlua/chuck.lua
@@ -0,0 +1,72 @@
+-- Copyright 2010-2018 Martin Morawetz. See License.txt.
+-- ChucK LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('chuck')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ -- Control structures.
+ break continue else for if repeat return switch until while
+ -- Other chuck keywords.
+ function fun spork const new
+]]))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ -- Special values.
+ false maybe me null NULL pi true
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ float int time dur void same
+]]))
+
+-- Classes.
+lex:add_rule('class', token(lexer.CLASS, word_match[[
+ -- Class keywords.
+ class extends implements interface private protected public pure static super
+ this
+]]))
+
+-- Global ugens.
+lex:add_rule('ugen', token('ugen', word_match[[dac adc blackhole]]))
+lex:add_style('ugen', lexer.STYLE_CONSTANT)
+
+-- Times.
+lex:add_rule('time', token('time', word_match[[
+ samp ms second minute hour day week
+]]))
+lex:add_style('time', lexer.STYLE_NUMBER)
+
+-- The special 'now' value.
+lex:add_rule('now', token('now', P('now')))
+lex:add_style('now', lexer.STYLE_CONSTANT..',bold')
+
+-- Strings.
+local sq_str = P('L')^-1 * lexer.delimited_range("'", true)
+local dq_str = P('L')^-1 * lexer.delimited_range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}@')))
+
+return lex
diff --git a/lexlua/cmake.lua b/lexlua/cmake.lua
new file mode 100644
index 000000000..0d3b3c5d4
--- /dev/null
+++ b/lexlua/cmake.lua
@@ -0,0 +1,140 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- CMake LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('cmake')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ IF ENDIF FOREACH ENDFOREACH WHILE ENDWHILE ELSE ELSEIF
+]], true)))
+
+-- Commands.
+lex:add_rule('command', token(lexer.FUNCTION, word_match([[
+ ADD_CUSTOM_COMMAND ADD_CUSTOM_TARGET ADD_DEFINITIONS ADD_DEPENDENCIES
+ ADD_EXECUTABLE ADD_LIBRARY ADD_SUBDIRECTORY ADD_TEST AUX_SOURCE_DIRECTORY
+ BUILD_COMMAND BUILD_NAME CMAKE_MINIMUM_REQUIRED CONFIGURE_FILE
+ CREATE_TEST_SOURCELIST ENABLE_LANGUAGE ENABLE_TESTING ENDMACRO EXEC_PROGRAM
+ EXECUTE_PROCESS EXPORT_LIBRARY_DEPENDENCIES FILE FIND_FILE FIND_LIBRARY
+ FIND_PACKAGE FIND_PATH FIND_PROGRAM FLTK_WRAP_UI GET_CMAKE_PROPERTY
+ GET_DIRECTORY_PROPERTY GET_FILENAME_COMPONENT GET_SOURCE_FILE_PROPERTY
+ GET_TARGET_PROPERTY GET_TEST_PROPERTY INCLUDE INCLUDE_DIRECTORIES
+ INCLUDE_EXTERNAL_MSPROJECT INCLUDE_REGULAR_EXPRESSION INSTALL INSTALL_FILES
+ INSTALL_PROGRAMS INSTALL_TARGETS LINK_DIRECTORIES LINK_LIBRARIES LIST
+ LOAD_CACHE LOAD_COMMAND MACRO MAKE_DIRECTORY MARK_AS_ADVANCED MATH MESSAGE
+ OPTION OUTPUT_REQUIRED_FILES PROJECT QT_WRAP_CPP QT_WRAP_UI REMOVE
+ REMOVE_DEFINITIONS SEPARATE_ARGUMENTS SET SET_DIRECTORY_PROPERTIES
+ SET_SOURCE_FILES_PROPERTIES SET_TARGET_PROPERTIES SET_TESTS_PROPERTIES
+ SITE_NAME SOURCE_GROUP STRING SUBDIR_DEPENDS SUBDIRS TARGET_LINK_LIBRARIES
+ TRY_COMPILE TRY_RUN USE_MANGLED_MESA UTILITY_SOURCE VARIABLE_REQUIRES
+ VTK_MAKE_INSTANTIATOR VTK_WRAP_JAVA VTK_WRAP_PYTHON VTK_WRAP_TCL WRITE_FILE
+]], true)))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match([[
+ BOOL CACHE FALSE N NO ON OFF NOTFOUND TRUE
+]], true)))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, word_match[[
+ APPLE ARGS BORLAND CMAKE_AR CMAKE_BACKWARDS_COMPATIBILITY CMAKE_BASE_NAME
+ CMAKE_BINARY_DIR CMAKE_BUILD_TOOL CMAKE_BUILD_TYPE CMAKE_CACHEFILE_DIR
+ CMAKE_CACHE_MAJOR_VERSION CMAKE_CACHE_MINOR_VERSION
+ CMAKE_CACHE_RELEASE_VERSION CMAKE_C_COMPILE_OBJECT CMAKE_C_COMPILER
+ CMAKE_C_COMPILER_ARG1 CMAKE_C_COMPILER_ENV_VAR CMAKE_C_COMPILER_FULLPATH
+ CMAKE_C_COMPILER_LOADED CMAKE_C_COMPILER_WORKS CMAKE_C_CREATE_SHARED_LIBRARY
+ CMAKE_C_CREATE_SHARED_LIBRARY_FORBIDDEN_FLAGS CMAKE_C_CREATE_SHARED_MODULE
+ CMAKE_C_CREATE_STATIC_LIBRARY CMAKE_CFG_INTDIR CMAKE_C_FLAGS
+ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_DEBUG_INIT CMAKE_C_FLAGS_INIT
+ CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_MINSIZEREL_INIT CMAKE_C_FLAGS_RELEASE
+ CMAKE_C_FLAGS_RELEASE_INIT CMAKE_C_FLAGS_RELWITHDEBINFO
+ CMAKE_C_FLAGS_RELWITHDEBINFO_INIT CMAKE_C_IGNORE_EXTENSIONS
+ CMAKE_C_INFORMATION_LOADED CMAKE_C_LINKER_PREFERENCE CMAKE_C_LINK_EXECUTABLE
+ CMAKE_C_LINK_FLAGS CMAKE_COLOR_MAKEFILE CMAKE_COMMAND CMAKE_COMPILER_IS_GNUCC
+ CMAKE_COMPILER_IS_GNUCC_RUN CMAKE_COMPILER_IS_GNUCXX
+ CMAKE_COMPILER_IS_GNUCXX_RUN CMAKE_C_OUTPUT_EXTENSION
+ CMAKE_C_SOURCE_FILE_EXTENSIONS CMAKE_CTEST_COMMAND CMAKE_CURRENT_BINARY_DIR
+ CMAKE_CURRENT_SOURCE_DIR CMAKE_CXX_COMPILE_OBJECT CMAKE_CXX_COMPILER
+ CMAKE_CXX_COMPILER_ARG1 CMAKE_CXX_COMPILER_ENV_VAR CMAKE_CXX_COMPILER_FULLPATH
+ CMAKE_CXX_COMPILER_LOADED CMAKE_CXX_COMPILER_WORKS
+ CMAKE_CXX_CREATE_SHARED_LIBRARY
+ CMAKE_CXX_CREATE_SHARED_LIBRARY_FORBIDDEN_FLAGS CMAKE_CXX_CREATE_SHARED_MODULE
+ CMAKE_CXX_CREATE_STATIC_LIBRARY CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG
+ CMAKE_CXX_FLAGS_DEBUG_INIT CMAKE_CXX_FLAGS_INIT CMAKE_CXX_FLAGS_MINSIZEREL
+ CMAKE_CXX_FLAGS_MINSIZEREL_INIT CMAKE_CXX_FLAGS_RELEASE
+ CMAKE_CXX_FLAGS_RELEASE_INIT CMAKE_CXX_FLAGS_RELWITHDEBINFO
+ CMAKE_CXX_FLAGS_RELWITHDEBINFO_INIT CMAKE_CXX_IGNORE_EXTENSIONS
+ CMAKE_CXX_INFORMATION_LOADED CMAKE_CXX_LINKER_PREFERENCE
+ CMAKE_CXX_LINK_EXECUTABLE CMAKE_CXX_LINK_FLAGS CMAKE_CXX_OUTPUT_EXTENSION
+ CMAKE_CXX_SOURCE_FILE_EXTENSIONS CMAKE_DL_LIBS CMAKE_EDIT_COMMAND
+ CMAKE_EXECUTABLE_SUFFIX CMAKE_EXE_LINKER_FLAGS CMAKE_EXE_LINKER_FLAGS_DEBUG
+ CMAKE_EXE_LINKER_FLAGS_MINSIZEREL CMAKE_EXE_LINKER_FLAGS_RELEASE
+ CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO CMAKE_FILES_DIRECTORY
+ CMAKE_FIND_APPBUNDLE CMAKE_FIND_FRAMEWORK CMAKE_FIND_LIBRARY_PREFIXES
+ CMAKE_FIND_LIBRARY_SUFFIXES CMAKE_GENERATOR CMAKE_HOME_DIRECTORY
+ CMAKE_INCLUDE_FLAG_C CMAKE_INCLUDE_FLAG_C_SEP CMAKE_INCLUDE_FLAG_CXX
+ CMAKE_INIT_VALUE CMAKE_INSTALL_PREFIX CMAKE_LIBRARY_PATH_FLAG
+ CMAKE_LINK_LIBRARY_FLAG CMAKE_LINK_LIBRARY_SUFFIX
+ CMAKE_MacOSX_Content_COMPILE_OBJECT CMAKE_MAJOR_VERSION CMAKE_MAKE_PROGRAM
+ CMAKE_MINOR_VERSION CMAKE_MODULE_EXISTS CMAKE_MODULE_LINKER_FLAGS
+ CMAKE_MODULE_LINKER_FLAGS_DEBUG CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
+ CMAKE_MODULE_LINKER_FLAGS_RELEASE CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
+ CMAKE_NUMBER_OF_LOCAL_GENERATORS CMAKE_OSX_ARCHITECTURES _CMAKE_OSX_MACHINE
+ CMAKE_OSX_SYSROOT CMAKE_PARENT_LIST_FILE CMAKE_PATCH_VERSION
+ CMAKE_PLATFORM_HAS_INSTALLNAME CMAKE_PLATFORM_IMPLICIT_INCLUDE_DIRECTORIES
+ CMAKE_PLATFORM_ROOT_BIN CMAKE_PROJECT_NAME CMAKE_RANLIB CMAKE_ROOT
+ CMAKE_SHARED_LIBRARY_C_FLAGS CMAKE_SHARED_LIBRARY_CREATE_C_FLAGS
+ CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS CMAKE_SHARED_LIBRARY_CXX_FLAGS
+ CMAKE_SHARED_LIBRARY_LINK_C_FLAGS CMAKE_SHARED_LIBRARY_PREFIX
+ CMAKE_SHARED_LIBRARY_RUNTIME_C_FLAG CMAKE_SHARED_LIBRARY_RUNTIME_C_FLAG_SEP
+ CMAKE_SHARED_LIBRARY_SONAME_C_FLAG CMAKE_SHARED_LIBRARY_SONAME_CXX_FLAG
+ CMAKE_SHARED_LIBRARY_SUFFIX CMAKE_SHARED_LINKER_FLAGS
+ CMAKE_SHARED_LINKER_FLAGS_DEBUG CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
+ CMAKE_SHARED_LINKER_FLAGS_RELEASE CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
+ CMAKE_SHARED_MODULE_CREATE_C_FLAGS CMAKE_SHARED_MODULE_CREATE_CXX_FLAGS
+ CMAKE_SHARED_MODULE_PREFIX CMAKE_SHARED_MODULE_SUFFIX CMAKE_SIZEOF_VOID_P
+ CMAKE_SKIP_RPATH CMAKE_SOURCE_DIR CMAKE_STATIC_LIBRARY_PREFIX
+ CMAKE_STATIC_LIBRARY_SUFFIX CMAKE_SYSTEM CMAKE_SYSTEM_AND_C_COMPILER_INFO_FILE
+ CMAKE_SYSTEM_AND_CXX_COMPILER_INFO_FILE CMAKE_SYSTEM_APPBUNDLE_PATH
+ CMAKE_SYSTEM_FRAMEWORK_PATH CMAKE_SYSTEM_INCLUDE_PATH CMAKE_SYSTEM_INFO_FILE
+ CMAKE_SYSTEM_LIBRARY_PATH CMAKE_SYSTEM_LOADED CMAKE_SYSTEM_NAME
+ CMAKE_SYSTEM_PROCESSOR CMAKE_SYSTEM_PROGRAM_PATH
+ CMAKE_SYSTEM_SPECIFIC_INFORMATION_LOADED CMAKE_SYSTEM_VERSION CMAKE_UNAME
+ CMAKE_USE_RELATIVE_PATHS CMAKE_VERBOSE_MAKEFILE CYGWIN EXECUTABLE_OUTPUT_PATH
+ FORCE HAVE_CMAKE_SIZEOF_VOID_P LIBRARY_OUTPUT_PATH LOCATION MACOSX_BUNDLE
+ MINGW MSVC MSVC60 MSVC70 MSVC71 MSVC80 MSVC_IDE POST_BUILD PRE_BUILD
+ PROJECT_BINARY_DIR PROJECT_NAME PROJECT_SOURCE_DIR RUN_CONFIGURE TARGET
+ UNIX WIN32
+]] + P('$') * lexer.delimited_range('{}', false, true)))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, word_match[[
+ AND COMMAND DEFINED DOC EQUAL EXISTS GREATER INTERNAL LESS MATCHES NAME NAMES
+ NAME_WE NOT OR PATH PATHS PROGRAM STREQUAL STRGREATER STRINGS STRLESS
+]] + S('=(){}')))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"')))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'IF', 'ENDIF')
+lex:add_fold_point(lexer.KEYWORD, 'FOREACH', 'ENDFOREACH')
+lex:add_fold_point(lexer.KEYWORD, 'WHILE', 'ENDWHILE')
+lex:add_fold_point(lexer.FUNCTION, 'MACRO', 'ENDMACRO')
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
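
Besides the named variables, the variable rule above also matches ${...} references through '$' * lexer.delimited_range('{}', false, true). A sketch of that sub-pattern on its own, assuming the in-tree lexer module is on package.path:

    local lpeg = require('lpeg')
    local lexer = require('lexer')

    local ref = lpeg.P('$') * lexer.delimited_range('{}', false, true)
    print(lpeg.match(ref, '${CMAKE_SOURCE_DIR}/src'))  --> 20 (just past the closing '}')
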
diff --git a/lexlua/coffeescript.lua b/lexlua/coffeescript.lua
new file mode 100644
index 000000000..55c4154e2
--- /dev/null
+++ b/lexlua/coffeescript.lua
@@ -0,0 +1,46 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- CoffeeScript LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, S = lpeg.P, lpeg.S
+
+local lex = lexer.new('coffeescript', {fold_by_indentation = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ all and bind break by case catch class const continue default delete do each
+ else enum export extends false finally for function if import in instanceof is
+ isnt let loop native new no not of off on or return super switch then this
+ throw true try typeof unless until var void when while with yes
+]]))
+
+-- Fields: object properties and methods.
+lex:add_rule('field', token(lexer.FUNCTION, '.' * (S('_$') + lexer.alpha) *
+ (S('_$') + lexer.alnum)^0))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local regex_str = #P('/') * lexer.last_char_includes('+-*%<>!=^&|?~:;,([{') *
+ lexer.delimited_range('/', true) * S('igm')^0
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')) +
+ token(lexer.REGEX, regex_str))
+
+-- Comments.
+local block_comment = '###' * (lexer.any - '###')^0 * P('###')^-1
+local line_comment = '#' * lexer.nonnewline_esc^0
+lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;,.()[]{}')))
+
+return lex
diff --git a/lexlua/container.lua b/lexlua/container.lua
new file mode 100644
index 000000000..212748ec5
--- /dev/null
+++ b/lexlua/container.lua
@@ -0,0 +1,5 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Container LPeg lexer.
+-- This is SciTE's plain text lexer.
+
+return require('lexer').new('container')
diff --git a/lexlua/context.lua b/lexlua/context.lua
new file mode 100644
index 000000000..5b3510671
--- /dev/null
+++ b/lexlua/context.lua
@@ -0,0 +1,47 @@
+-- Copyright 2006-2018 Robert Gieseke. See License.txt.
+-- ConTeXt LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('context')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '%' * lexer.nonnewline^0))
+
+-- ConTeXt environments.
+local environment = token('environment', '\\' * (P('start') + 'stop') *
+ lexer.word)
+lex:add_rule('environment', environment)
+lex:add_style('environment', lexer.STYLE_KEYWORD)
+
+-- Sections.
+lex:add_rule('section', token('section', '\\' * word_match[[
+ chapter part section subject subsection subsubject subsubsection subsubsubject
+ title
+]]))
+lex:add_style('section', lexer.STYLE_CLASS)
+
+-- Commands.
+lex:add_rule('command', token(lexer.KEYWORD, '\\' *
+ (lexer.alpha^1 + S('#$&~_^%{}'))))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('$&#{}[]')))
+
+-- Fold points.
+lex:add_fold_point('environment', '\\start', '\\stop')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '%', lexer.fold_line_comments('%'))
+
+-- Embedded Lua.
+local luatex = lexer.load('lua')
+local luatex_start_rule = #P('\\startluacode') * environment
+local luatex_end_rule = #P('\\stopluacode') * environment
+lex:embed(luatex, luatex_start_rule, luatex_end_rule)
+
+return lex
diff --git a/lexlua/cpp.lua b/lexlua/cpp.lua
new file mode 100644
index 000000000..277b820e8
--- /dev/null
+++ b/lexlua/cpp.lua
@@ -0,0 +1,75 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- C++ LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('cpp')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ asm auto break case catch class const const_cast continue default delete do
+ dynamic_cast else explicit export extern false for friend goto if inline
+ mutable namespace new operator private protected public register
+ reinterpret_cast return sizeof static static_cast switch template this throw
+ true try typedef typeid typename using virtual volatile while
+ -- Operators.
+ and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq
+ -- C++11.
+ alignas alignof constexpr decltype final noexcept override static_assert
+ thread_local
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ bool char double enum float int long short signed struct union unsigned void
+ wchar_t
+ -- C++11.
+ char16_t char32_t nullptr
+]]))
+
+-- Strings.
+local sq_str = P('L')^-1 * lexer.delimited_range("'", true)
+local dq_str = P('L')^-1 * lexer.delimited_range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Preprocessor.
+local preproc_word = word_match[[
+ define elif else endif error if ifdef ifndef import line pragma undef using
+ warning
+]]
+lex:add_rule('preprocessor',
+ #lexer.starts_line('#') *
+ (token(lexer.PREPROCESSOR, '#' * S('\t ')^0 * preproc_word) +
+ token(lexer.PREPROCESSOR, '#' * S('\t ')^0 * 'include') *
+ (token(lexer.WHITESPACE, S('\t ')^1) *
+ token(lexer.STRING,
+ lexer.delimited_range('<>', true, true)))^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;,.()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
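
The preprocessor rule above is guarded by lexer.starts_line('#'), so a '#' that does not begin a line never starts a preprocessor token. A sketch of that guard, assuming the in-tree lexer module is on package.path:

    local lpeg = require('lpeg')
    local lexer = require('lexer')

    local hash = lexer.starts_line('#')
    print(lpeg.match(hash, '#include <cstdio>') ~= nil)  --> true (line-initial '#')
    print(lpeg.match(hash, 'x = y; # not cpp', 8))       --> nil ('#' is mid-line)
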
diff --git a/lexlua/crystal.lua b/lexlua/crystal.lua
new file mode 100644
index 000000000..75f9437a6
--- /dev/null
+++ b/lexlua/crystal.lua
@@ -0,0 +1,122 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Copyright 2017 Michel Martens.
+-- Crystal LPeg lexer (based on Ruby).
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('crystal')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ alias begin break case class def defined? do else elsif end ensure false for
+ if in module next nil not redo rescue retry return self super then true undef
+ unless until when while yield __FILE__ __LINE__
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ abort at_exit caller delay exit fork future get_stack_top gets lazy loop main
+ p print printf puts raise rand read_line require sleep spawn sprintf system
+ with_color
+ -- Macros.
+ assert_responds_to debugger parallel pp record redefine_main
+]]) * -S('.:|'))
+
+-- Identifiers.
+local word_char = lexer.alnum + S('_!?')
+local word = (lexer.alpha + '_') * word_char^0
+local identifier = token(lexer.IDENTIFIER, word)
+
+local delimiter_matches = {['('] = ')', ['['] = ']', ['{'] = '}'}
+local literal_delimitted = P(function(input, index)
+ local delimiter = input:sub(index, index)
+ if not delimiter:find('[%w\r\n\f\t ]') then -- only non alpha-numerics
+ local match_pos, patt
+ if delimiter_matches[delimiter] then
+ -- Handle nested delimiter/matches in strings.
+ local s, e = delimiter, delimiter_matches[delimiter]
+ patt = lexer.delimited_range(s..e, false, false, true)
+ else
+ patt = lexer.delimited_range(delimiter)
+ end
+ match_pos = lpeg.match(patt, input, index)
+ return match_pos or #input + 1
+ end
+end)
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline_esc^0))
+
+-- Strings.
+local cmd_str = lexer.delimited_range('`')
+local sq_str = lexer.delimited_range("'")
+local dq_str = lexer.delimited_range('"')
+local heredoc = '<<' * P(function(input, index)
+ local s, e, indented, _, delimiter =
+ input:find('(%-?)(["`]?)([%a_][%w_]*)%2[\n\r\f;]+', index)
+ if s == index and delimiter then
+ local end_heredoc = (#indented > 0 and '[\n\r\f]+ *' or '[\n\r\f]+')
+ local _, e = input:find(end_heredoc..delimiter, e)
+ return e and e + 1 or #input + 1
+ end
+end)
+-- TODO: regex_str fails with `obj.method /patt/` syntax.
+local regex_str = #P('/') * lexer.last_char_includes('!%^&*([{-=+|:;,?<>~') *
+ lexer.delimited_range('/', true, false) * S('iomx')^0
+lex:add_rule('string', token(lexer.STRING, (sq_str + dq_str + heredoc +
+ cmd_str) * S('f')^-1) +
+ token(lexer.REGEX, regex_str))
+
+-- Numbers.
+local dec = lexer.digit^1 * ('_' * lexer.digit^1)^0 * S('ri')^-1
+local bin = '0b' * S('01')^1 * ('_' * S('01')^1)^0
+local integer = S('+-')^-1 * (bin + lexer.hex_num + lexer.oct_num + dec)
+-- TODO: meta, control, etc. for numeric_literal.
+local numeric_literal = '?' * (lexer.any - lexer.space) * -word_char
+lex:add_rule('number', token(lexer.NUMBER, lexer.float * S('ri')^-1 + integer +
+ numeric_literal))
+
+-- Variables.
+local global_var = '$' * (word + S('!@L+`\'=~/\\,.;<>_*"$?:') + lexer.digit +
+ '-' * S('0FadiIKlpvw'))
+local class_var = '@@' * word
+local inst_var = '@' * word
+lex:add_rule('variable', token(lexer.VARIABLE, global_var + class_var +
+ inst_var))
+
+-- Symbols.
+lex:add_rule('symbol', token('symbol', ':' * P(function(input, index)
+ if input:sub(index - 2, index - 2) ~= ':' then return index end
+end) * (word_char^1 + sq_str + dq_str)))
+lex:add_style('symbol', lexer.STYLE_CONSTANT)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~')))
+
+-- Fold points.
+local function disambiguate(text, pos, line, s)
+ return line:sub(1, s - 1):match('^%s*$') and
+ not text:sub(1, pos - 1):match('\\[ \t]*\r?\n$') and 1 or 0
+end
+lex:add_fold_point(lexer.KEYWORD, 'begin', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'case', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'class', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'def', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'do', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'for', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'module', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'if', disambiguate)
+lex:add_fold_point(lexer.KEYWORD, 'while', disambiguate)
+lex:add_fold_point(lexer.KEYWORD, 'unless', disambiguate)
+lex:add_fold_point(lexer.KEYWORD, 'until', disambiguate)
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.OPERATOR, '#', lexer.fold_line_comments('#'))
+
+return lex
diff --git a/lexlua/csharp.lua b/lexlua/csharp.lua
new file mode 100644
index 000000000..4263c6672
--- /dev/null
+++ b/lexlua/csharp.lua
@@ -0,0 +1,68 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- C# LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('csharp')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ class delegate enum event interface namespace struct using abstract const
+ explicit extern fixed implicit internal lock out override params partial
+ private protected public ref sealed static readonly unsafe virtual volatile
+ add as assembly base break case catch checked continue default do else finally
+ for foreach get goto if in is new remove return set sizeof stackalloc super
+ switch this throw try typeof unchecked value void while yield
+ null true false
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ bool byte char decimal double float int long object operator sbyte short
+ string uint ulong ushort
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Strings.
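+-- ml_str covers '@'-prefixed verbatim strings, which may span multiple lines
+-- and do not process escape sequences.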
+local sq_str = lexer.delimited_range("'", true)
+local dq_str = lexer.delimited_range('"', true)
+local ml_str = P('@')^-1 * lexer.delimited_range('"', false, true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ml_str))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ S('lLdDfFMm')^-1))
+
+-- Preprocessor.
+local preproc_word = word_match[[
+ define elif else endif error if line undef warning region endregion
+]]
+lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, lexer.starts_line('#') *
+ S('\t ')^0 *
+ preproc_word))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('~!.,:;+-*/<>=\\^|&%?()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'region', 'endregion')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/css.lua b/lexlua/css.lua
new file mode 100644
index 000000000..7b3230287
--- /dev/null
+++ b/lexlua/css.lua
@@ -0,0 +1,165 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- CSS LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local lex = lexer.new('css')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Properties.
+lex:add_rule('property', token('property', word_match[[
+ -- CSS 1.
+ color background-color background-image background-repeat
+ background-attachment background-position background font-family font-style
+ font-variant font-weight font-size font word-spacing letter-spacing
+ text-decoration vertical-align text-transform text-align text-indent
+ line-height margin-top margin-right margin-bottom margin-left margin
+ padding-top padding-right padding-bottom padding-left padding border-top-width
+ border-right-width border-bottom-width border-left-width border-width
+ border-top border-right border-bottom border-left border border-color
+ border-style width height float clear display white-space list-style-type
+ list-style-image list-style-position list-style
+ -- CSS 2.
+ border-top-color border-right-color border-bottom-color border-left-color
+ border-color border-top-style border-right-style border-bottom-style
+ border-left-style border-style top right bottom left position z-index
+ direction unicode-bidi min-width max-width min-height max-height overflow clip
+ visibility content quotes counter-reset counter-increment marker-offset size
+ marks page-break-before page-break-after page-break-inside page orphans widows
+ font-stretch font-size-adjust unicode-range units-per-em src panose-1 stemv
+ stemh slope cap-height x-height ascent descent widths bbox definition-src
+ baseline centerline mathline topline text-shadow caption-side table-layout
+ border-collapse border-spacing empty-cells speak-header cursor outline
+ outline-width outline-style outline-color volume speak pause-before
+ pause-after pause cue-before cue-after cue play-during azimuth elevation
+ speech-rate voice-family pitch pitch-range stress richness speak-punctuation
+ speak-numeral
+]]))
+lex:add_style('property', lexer.STYLE_KEYWORD)
+
+-- Values.
+lex:add_rule('value', token('value', word_match[[
+ -- CSS 1.
+ auto none normal italic oblique small-caps bold bolder lighter xx-small
+ x-small small medium large x-large xx-large larger smaller transparent repeat
+ repeat-x repeat-y no-repeat scroll fixed top bottom left center right justify
+ both underline overline line-through blink baseline sub super text-top middle
+ text-bottom capitalize uppercase lowercase thin medium thick dotted dashed
+ solid double groove ridge inset outset block inline list-item pre no-wrap
+ inside outside disc circle square decimal lower-roman upper-roman lower-alpha
+ upper-alpha aqua black blue fuchsia gray green lime maroon navy olive purple
+ red silver teal white yellow
+ -- CSS 2.
+ inherit run-in compact marker table inline-table table-row-group
+ table-header-group table-footer-group table-row table-column-group
+ table-column table-cell table-caption static relative absolute fixed ltr rtl
+ embed bidi-override visible hidden scroll collapse open-quote close-quote
+ no-open-quote no-close-quote decimal-leading-zero lower-greek lower-latin
+ upper-latin hebrew armenian georgian cjk-ideographic hiragana katakana
+ hiragana-iroha katakana-iroha landscape portrait crop cross always avoid wider
+ narrower ultra-condensed extra-condensed condensed semi-condensed
+ semi-expanded expanded extra-expanded ultra-expanded caption icon menu
+ message-box small-caption status-bar separate show hide once crosshair default
+ pointer move text wait help e-resize ne-resize nw-resize n-resize se-resize
+ sw-resize s-resize w-resize ActiveBorder ActiveCaption AppWorkspace Background
+ ButtonFace ButtonHighlight ButtonShadow InactiveCaptionText ButtonText
+ CaptionText GrayText Highlight HighlightText InactiveBorder InactiveCaption
+ InfoBackground InfoText Menu MenuText Scrollbar ThreeDDarkShadow ThreeDFace
+ ThreeDHighlight ThreeDLightShadow ThreeDShadow Window WindowFrame WindowText
+ silent x-soft soft medium loud x-loud spell-out mix left-side far-left
+ center-left center-right far-right right-side behind leftwards rightwards
+ below level above higher lower x-slow slow medium fast x-fast faster slower
+  male female child x-low low high x-high code digits continuous
+]]))
+lex:add_style('value', lexer.STYLE_CONSTANT)
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ attr blackness blend blenda blur brightness calc circle color-mod contrast
+ counter cubic-bezier device-cmyk drop-shadow ellipse gray grayscale hsl hsla
+ hue hue-rotate hwb image inset invert lightness linear-gradient matrix
+ matrix3d opacity perspective polygon radial-gradient rect
+ repeating-linear-gradient repeating-radial-gradient rgb rgba rotate rotate3d
+ rotateX rotateY rotateZ saturate saturation scale scale3d scaleX scaleY scaleZ
+ sepia shade skewX skewY steps tint toggle translate translate3d translateX
+ translateY translateZ url whiteness
+]]))
+
+-- Colors.
+local xdigit = lexer.xdigit
+lex:add_rule('color', token('color', word_match[[
+ aliceblue antiquewhite aqua aquamarine azure beige bisque black
+ blanchedalmond blue blueviolet brown burlywood cadetblue chartreuse chocolate
+ coral cornflowerblue cornsilk crimson cyan darkblue darkcyan darkgoldenrod
+ darkgray darkgreen darkgrey darkkhaki darkmagenta darkolivegreen darkorange
+ darkorchid darkred darksalmon darkseagreen darkslateblue darkslategray
+ darkslategrey darkturquoise darkviolet deeppink deepskyblue dimgray dimgrey
+ dodgerblue firebrick floralwhite forestgreen fuchsia gainsboro ghostwhite gold
+ goldenrod gray green greenyellow grey honeydew hotpink indianred indigo ivory
+ khaki lavender lavenderblush lawngreen lemonchiffon lightblue lightcoral
+ lightcyan lightgoldenrodyellow lightgray lightgreen lightgrey lightpink
+ lightsalmon lightseagreen lightskyblue lightslategray lightslategrey
+ lightsteelblue lightyellow lime limegreen linen magenta maroon
+ mediumaquamarine mediumblue mediumorchid mediumpurple mediumseagreen
+ mediumslateblue mediumspringgreen mediumturquoise mediumvioletred
+ midnightblue mintcream mistyrose moccasin navajowhite navy oldlace olive
+ olivedrab orange orangered orchid palegoldenrod palegreen paleturquoise
+ palevioletred papayawhip peachpuff peru pink plum powderblue purple
+ rebeccapurple red rosybrown royalblue saddlebrown salmon sandybrown seagreen
+ seashell sienna silver skyblue slateblue slategray slategrey snow springgreen
+ steelblue tan teal thistle tomato transparent turquoise violet wheat white
+ whitesmoke yellow yellowgreen
+]] + '#' * xdigit * xdigit * xdigit * (xdigit * xdigit * xdigit)^-1))
+lex:add_style('color', lexer.STYLE_NUMBER)
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.alpha *
+ (lexer.alnum + S('_-'))^0))
+
+-- Pseudo classes and pseudo elements.
+lex:add_rule('pseudoclass', ':' * token('pseudoclass', word_match[[
+ active checked disabled empty enabled first-child first-of-type focus hover
+ in-range invalid lang last-child last-of-type link not nth-child
+ nth-last-child nth-last-of-type nth-of-type only-of-type only-child optional
+ out-of-range read-only read-write required root target valid visited
+]]))
+lex:add_style('pseudoclass', lexer.STYLE_CONSTANT)
+lex:add_rule('pseudoelement', '::' * token('pseudoelement', word_match[[
+ after before first-letter first-line selection
+]]))
+lex:add_style('pseudoelement', lexer.STYLE_CONSTANT)
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '/*' * (lexer.any - '*/')^0 *
+ P('*/')^-1))
+
+-- Numbers.
+local unit = token('unit', word_match[[
+ ch cm deg dpcm dpi dppx em ex grad Hz in kHz mm ms pc pt px q rad rem s turn
+ vh vmax vmin vw
+]])
+lex:add_style('unit', lexer.STYLE_NUMBER)
+lex:add_rule('number', token(lexer.NUMBER, lexer.digit^1) * unit^-1)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('~!#*>+=|.,:;()[]{}')))
+
+-- At rule.
+lex:add_rule('at_rule', token('at_rule', P('@') * word_match[[
+ charset font-face media page import namespace
+]]))
+lex:add_style('at_rule', lexer.STYLE_PREPROCESSOR)
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+
+return lex
diff --git a/lexlua/cuda.lua b/lexlua/cuda.lua
new file mode 100644
index 000000000..950392057
--- /dev/null
+++ b/lexlua/cuda.lua
@@ -0,0 +1,71 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- CUDA LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('cuda', {inherit = lexer.load('cpp')})
+
+-- Whitespace.
+lex:modify_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
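+-- Prepend CUDA-specific keywords to the keyword rule inherited from the cpp
+-- lexer so both sets are highlighted.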
+lex:modify_rule('keyword', token(lexer.KEYWORD, word_match[[
+ __global__ __host__ __device__ __constant__ __shared__
+]]) + lex:get_rule('keyword'))
+
+-- Types.
+lex:modify_rule('type', token(lexer.TYPE, word_match[[
+ uint int1 uint1 int2 uint2 int3 uint3 int4 uint4 float1 float2 float3 float4
+ char1 char2 char3 char4 uchar1 uchar2 uchar3 uchar4 short1 short2 short3
+ short4 dim1 dim2 dim3 dim4
+]]) + lex:get_rule('type') +
+
+-- Functions.
+token(lexer.FUNCTION, word_match[[
+ -- Atom.
+ atomicAdd atomicAnd atomicCAS atomicDec atomicExch atomicInc atomicMax
+ atomicMin atomicOr atomicSub atomicXor
+ -- Dev.
+ tex1D tex1Dfetch tex2D __float_as_int __int_as_float __float2int_rn
+ __float2int_rz __float2int_ru __float2int_rd __float2uint_rn __float2uint_rz
+ __float2uint_ru __float2uint_rd __int2float_rn __int2float_rz __int2float_ru
+ __int2float_rd __uint2float_rn __uint2float_rz __uint2float_ru __uint2float_rd
+ __fadd_rz __fmul_rz __fdividef __mul24 __umul24 __mulhi __umulhi __mul64hi
+ __umul64hi min umin fminf fmin max umax fmaxf fmax abs fabsf fabs sqrtf sqrt
+ sinf __sinf sin cosf __cosf cos sincosf __sincosf expf __expf exp logf __logf
+ log
+ -- Runtime.
+ cudaBindTexture cudaBindTextureToArray cudaChooseDevice cudaConfigureCall
+ cudaCreateChannelDesc cudaD3D10GetDevice cudaD3D10MapResources
+ cudaD3D10RegisterResource cudaD3D10ResourceGetMappedArray
+ cudaD3D10ResourceGetMappedPitch cudaD3D10ResourceGetMappedPointer
+ cudaD3D10ResourceGetMappedSize cudaD3D10ResourceGetSurfaceDimensions
+ cudaD3D10ResourceSetMapFlags cudaD3D10SetDirect3DDevice
+ cudaD3D10UnmapResources cudaD3D10UnregisterResource cudaD3D9GetDevice
+ cudaD3D9GetDirect3DDevice cudaD3D9MapResources cudaD3D9RegisterResource
+ cudaD3D9ResourceGetMappedArray cudaD3D9ResourceGetMappedPitch
+ cudaD3D9ResourceGetMappedPointer cudaD3D9ResourceGetMappedSize
+ cudaD3D9ResourceGetSurfaceDimensions cudaD3D9ResourceSetMapFlags
+ cudaD3D9SetDirect3DDevice cudaD3D9UnmapResources cudaD3D9UnregisterResource
+ cudaEventCreate cudaEventDestroy cudaEventElapsedTime cudaEventQuery
+ cudaEventRecord cudaEventSynchronize cudaFree cudaFreeArray cudaFreeHost
+ cudaGetChannelDesc cudaGetDevice cudaGetDeviceCount cudaGetDeviceProperties
+ cudaGetErrorString cudaGetLastError cudaGetSymbolAddress cudaGetSymbolSize
+ cudaGetTextureAlignmentOffset cudaGetTextureReference cudaGLMapBufferObject
+ cudaGLRegisterBufferObject cudaGLSetGLDevice cudaGLUnmapBufferObject
+ cudaGLUnregisterBufferObject cudaLaunch cudaMalloc cudaMalloc3D
+ cudaMalloc3DArray cudaMallocArray cudaMallocHost cudaMallocPitch cudaMemcpy
+ cudaMemcpy2D cudaMemcpy2DArrayToArray cudaMemcpy2DFromArray
+ cudaMemcpy2DToArray cudaMemcpy3D cudaMemcpyArrayToArray cudaMemcpyFromArray
+ cudaMemcpyFromSymbol cudaMemcpyToArray cudaMemcpyToSymbol cudaMemset
+ cudaMemset2D cudaMemset3D cudaSetDevice cudaSetupArgument cudaStreamCreate
+ cudaStreamDestroy cudaStreamQuery cudaStreamSynchronize cudaThreadExit
+ cudaThreadSynchronize cudaUnbindTexture
+]]) +
+
+-- Variables.
+token(lexer.VARIABLE, word_match[[gridDim blockIdx blockDim threadIdx]]))
+
+return lex
diff --git a/lexlua/dart.lua b/lexlua/dart.lua
new file mode 100644
index 000000000..4a2c43b57
--- /dev/null
+++ b/lexlua/dart.lua
@@ -0,0 +1,57 @@
+-- Copyright 2013-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Dart LPeg lexer.
+-- Written by Brian Schott (@Hackerpilot on Github).
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('dart')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ assert break case catch class const continue default do else enum extends
+ false final finally for if in is new null rethrow return super switch this
+ throw true try var void while with
+]]))
+
+-- Built-ins.
+lex:add_rule('builtin', token(lexer.CONSTANT, word_match[[
+ abstract as dynamic export external factory get implements import library
+ operator part set static typedef
+]]))
+
+-- Strings.
+local sq_str = S('r')^-1 * lexer.delimited_range("'", true)
+local dq_str = S('r')^-1 * lexer.delimited_range('"', true)
+local sq_str_multiline = S('r')^-1 * "'''" * (lexer.any - "'''")^0 * P("'''")^-1
+local dq_str_multiline = S('r')^-1 * '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+lex:add_rule('string', token(lexer.STRING, sq_str_multiline + dq_str_multiline +
+ sq_str + dq_str))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '//' * lexer.nonnewline_esc^0 +
+ lexer.nested_pair('/*', '*/')))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.hex_num))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('#?=!<>+-*$/%&|^~.,;()[]{}')))
+
+-- Annotations.
+lex:add_rule('annotation', token('annotation', '@' * lexer.word^1))
+lex:add_style('annotation', lexer.STYLE_PREPROCESSOR)
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/desktop.lua b/lexlua/desktop.lua
new file mode 100644
index 000000000..1c78e3278
--- /dev/null
+++ b/lexlua/desktop.lua
@@ -0,0 +1,56 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Desktop Entry LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('desktop')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keys.
+lex:add_rule('key', token('key', word_match[[
+ Type Version Name GenericName NoDisplay Comment Icon Hidden OnlyShowIn
+  NotShowIn TryExec Exec Path Terminal MimeType Categories StartupNotify
+ StartupWMClass URL
+]]))
+lex:add_style('key', lexer.STYLE_KEYWORD)
+
+-- Values.
+lex:add_rule('value', token('value', word_match[[true false]]))
+lex:add_style('value', lexer.STYLE_CONSTANT)
+
+-- Identifiers.
+lex:add_rule('identifier', lexer.token(lexer.IDENTIFIER,
+ lexer.alpha * (lexer.alnum + S('_-'))^0))
+
+-- Group headers.
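+-- '[...]' at the start of a line is a group header; elsewhere (e.g. appended
+-- to a key name) it is lexed as a locale specifier below.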
+lex:add_rule('header',
+ lexer.starts_line(token('header',
+ lexer.delimited_range('[]', false, true))))
+lex:add_style('header', lexer.STYLE_LABEL)
+
+-- Locales.
+lex:add_rule('locale', token('locale',
+ lexer.delimited_range('[]', false, true)))
+lex:add_style('locale', lexer.STYLE_CLASS)
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"')))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer)))
+
+-- Field codes.
+lex:add_rule('code', lexer.token('code', P('%') * S('fFuUdDnNickvm')))
+lex:add_style('code', lexer.STYLE_VARIABLE)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=')))
+
+return lex
diff --git a/lexlua/diff.lua b/lexlua/diff.lua
new file mode 100644
index 000000000..a0c62d214
--- /dev/null
+++ b/lexlua/diff.lua
@@ -0,0 +1,32 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Diff LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('diff', {lex_by_line = true})
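+-- Lines are lexed individually (lex_by_line), so each rule below typically
+-- consumes an entire line at once.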
+
+-- Text, separators, and file headers.
+lex:add_rule('index', token(lexer.COMMENT, 'Index: ' * lexer.any^0 * -1))
+lex:add_rule('separator', token(lexer.COMMENT, ('---' + P('*')^4 + P('=')^1) *
+ lexer.space^0 * -1))
+lex:add_rule('header', token('header', (P('*** ') + '--- ' + '+++ ') *
+ lexer.any^1))
+lex:add_style('header', lexer.STYLE_COMMENT)
+
+-- Location.
+lex:add_rule('location', token(lexer.NUMBER, ('@@' + lexer.digit^1 + '****') *
+ lexer.any^1))
+
+-- Additions, deletions, and changes.
+lex:add_rule('addition', token('addition', S('>+') * lexer.any^0))
+lex:add_style('addition', 'fore:$(color.green)')
+lex:add_rule('deletion', token('deletion', S('<-') * lexer.any^0))
+lex:add_style('deletion', 'fore:$(color.red)')
+lex:add_rule('change', token('change', '!' * lexer.any^0))
+lex:add_style('change', 'fore:$(color.yellow)')
+
+lex:add_rule('any_line', token(lexer.DEFAULT, lexer.any^1))
+
+return lex
diff --git a/lexlua/django.lua b/lexlua/django.lua
new file mode 100644
index 000000000..88406e1f8
--- /dev/null
+++ b/lexlua/django.lua
@@ -0,0 +1,55 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Django LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('django')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ as block blocktrans by endblock endblocktrans comment endcomment cycle date
+ debug else extends filter endfilter firstof for endfor if endif ifchanged
+ endifchanged ifnotequal endifnotequal in load not now or parsed regroup ssi
+ trans with widthratio
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ add addslashes capfirst center cut date default dictsort dictsortreversed
+ divisibleby escape filesizeformat first fix_ampersands floatformat get_digit
+ join length length_is linebreaks linebreaksbr linenumbers ljust lower
+ make_list phone2numeric pluralize pprint random removetags rjust slice slugify
+ stringformat striptags time timesince title truncatewords unordered_list upper
+ urlencode urlize urlizetrunc wordcount wordwrap yesno
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING,
+ lexer.delimited_range('"', false, true)))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S(':,.|')))
+
+-- Embed Django in HTML.
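+-- The HTML lexer is the parent: Django code is highlighted between '{{'/'{%'
+-- and '}}'/'%}' tags, and '{# ... #}' comments are added to the HTML comment
+-- rule.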
+local html = lexer.load('html')
+local html_comment = '<!--' * (lexer.any - '-->')^0 * P('-->')^-1
+local django_comment = '{#' * (lexer.any - lexer.newline - '#}')^0 * P('#}')^-1
+html:modify_rule('comment', token(lexer.COMMENT, html_comment + django_comment))
+local django_start_rule = token('django_tag', '{' * S('{%'))
+local django_end_rule = token('django_tag', S('%}') * '}')
+html:embed(lex, django_start_rule, django_end_rule)
+lex:add_style('django_tag', lexer.STYLE_EMBEDDED)
+
+-- Fold points.
+lex:add_fold_point('django_tag', '{{', '}}')
+lex:add_fold_point('django_tag', '{%', '%}')
+
+return lex
diff --git a/lexlua/dmd.lua b/lexlua/dmd.lua
new file mode 100644
index 000000000..f294043d7
--- /dev/null
+++ b/lexlua/dmd.lua
@@ -0,0 +1,178 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- D LPeg lexer.
+-- Heavily modified by Brian Schott (@Hackerpilot on Github).
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local M = {_NAME = 'dmd'}
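+-- This lexer still uses the legacy table-based API (_NAME, _rules,
+-- _tokenstyles, _foldsymbols) rather than lexer.new().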
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+local nested_comment = lexer.nested_pair('/+', '+/')
+local comment = token(lexer.COMMENT, line_comment + block_comment +
+ nested_comment)
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", true) * S('cwd')^-1
+local dq_str = lexer.delimited_range('"') * S('cwd')^-1
+local lit_str = 'r' * lexer.delimited_range('"', false, true) * S('cwd')^-1
+local bt_str = lexer.delimited_range('`', false, true) * S('cwd')^-1
+local hex_str = 'x' * lexer.delimited_range('"') * S('cwd')^-1
+local other_hex_str = '\\x' * (lexer.xdigit * lexer.xdigit)^1
+local del_str = lexer.nested_pair('q"[', ']"') * S('cwd')^-1 +
+ lexer.nested_pair('q"(', ')"') * S('cwd')^-1 +
+ lexer.nested_pair('q"{', '}"') * S('cwd')^-1 +
+ lexer.nested_pair('q"<', '>"') * S('cwd')^-1 +
+ P('q') * lexer.nested_pair('{', '}') * S('cwd')^-1
+local string = token(lexer.STRING, del_str + sq_str + dq_str + lit_str +
+ bt_str + hex_str + other_hex_str)
+
+-- Numbers.
+local dec = lexer.digit^1 * ('_' * lexer.digit^1)^0
+local hex_num = lexer.hex_num * ('_' * lexer.xdigit^1)^0
+local bin_num = '0' * S('bB') * S('01_')^1
+local oct_num = '0' * S('01234567_')^1
+local integer = S('+-')^-1 * (hex_num + oct_num + bin_num + dec)
+local number = token(lexer.NUMBER, (lexer.float + integer) * S('uUlLdDfFi')^-1)
+
+-- Keywords.
+local keyword = token(lexer.KEYWORD, word_match{
+ 'abstract', 'align', 'asm', 'assert', 'auto', 'body', 'break', 'case', 'cast',
+ 'catch', 'const', 'continue', 'debug', 'default', 'delete',
+ 'deprecated', 'do', 'else', 'extern', 'export', 'false', 'final', 'finally',
+ 'for', 'foreach', 'foreach_reverse', 'goto', 'if', 'import', 'immutable',
+ 'in', 'inout', 'invariant', 'is', 'lazy', 'macro', 'mixin', 'new', 'nothrow',
+ 'null', 'out', 'override', 'pragma', 'private', 'protected', 'public', 'pure',
+ 'ref', 'return', 'scope', 'shared', 'static', 'super', 'switch',
+  'synchronized', 'this', 'throw', 'true', 'try', 'typeid', 'typeof', 'unittest',
+ 'version', 'virtual', 'volatile', 'while', 'with', '__gshared', '__thread',
+ '__traits', '__vector', '__parameters'
+})
+
+-- Types.
+local type = token(lexer.TYPE, word_match{
+ 'alias', 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'class',
+ 'creal', 'dchar', 'delegate', 'double', 'enum', 'float', 'function',
+ 'idouble', 'ifloat', 'int', 'interface', 'ireal', 'long', 'module', 'package',
+ 'ptrdiff_t', 'real', 'short', 'size_t', 'struct', 'template', 'typedef',
+ 'ubyte', 'ucent', 'uint', 'ulong', 'union', 'ushort', 'void', 'wchar',
+ 'string', 'wstring', 'dstring', 'hash_t', 'equals_t'
+})
+
+-- Constants.
+local constant = token(lexer.CONSTANT, word_match{
+ '__FILE__', '__LINE__', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__',
+ '__VENDOR__', '__VERSION__', '__FUNCTION__', '__PRETTY_FUNCTION__',
+ '__MODULE__',
+})
+
+local class_sequence = token(lexer.TYPE, P('class') + P('struct')) * ws^1 *
+ token(lexer.CLASS, lexer.word)
+
+-- Identifiers.
+local identifier = token(lexer.IDENTIFIER, lexer.word)
+
+-- Operators.
+local operator = token(lexer.OPERATOR, S('?=!<>+-*$/%&|^~.,;()[]{}'))
+
+-- Properties.
+local properties = (type + identifier + operator) * token(lexer.OPERATOR, '.') *
+ token(lexer.VARIABLE, word_match{
+ 'alignof', 'dig', 'dup', 'epsilon', 'idup', 'im', 'init', 'infinity',
+ 'keys', 'length', 'mangleof', 'mant_dig', 'max', 'max_10_exp', 'max_exp',
+ 'min', 'min_normal', 'min_10_exp', 'min_exp', 'nan', 'offsetof', 'ptr',
+ 're', 'rehash', 'reverse', 'sizeof', 'sort', 'stringof', 'tupleof',
+ 'values'
+ })
+
+-- Preprocs.
+local annotation = token('annotation', '@' * lexer.word^1)
+local preproc = token(lexer.PREPROCESSOR, '#' * lexer.nonnewline^0)
+
+-- Traits.
+local traits_list = token('traits', word_match{
+ 'allMembers', 'classInstanceSize', 'compiles', 'derivedMembers',
+ 'getAttributes', 'getMember', 'getOverloads', 'getProtection', 'getUnitTests',
+ 'getVirtualFunctions', 'getVirtualIndex', 'getVirtualMethods', 'hasMember',
+ 'identifier', 'isAbstractClass', 'isAbstractFunction', 'isArithmetic',
+ 'isAssociativeArray', 'isFinalClass', 'isFinalFunction', 'isFloating',
+ 'isIntegral', 'isLazy', 'isNested', 'isOut', 'isOverrideFunction', 'isPOD',
+ 'isRef', 'isSame', 'isScalar', 'isStaticArray', 'isStaticFunction',
+ 'isUnsigned', 'isVirtualFunction', 'isVirtualMethod', 'parent'
+})
+
+local scopes_list = token('scopes', word_match{'exit', 'success', 'failure'})
+
+-- Versions.
+local versions_list = token('versions', word_match{
+ 'AArch64', 'AIX', 'all', 'Alpha', 'Alpha_HardFloat', 'Alpha_SoftFloat',
+ 'Android', 'ARM', 'ARM_HardFloat', 'ARM_SoftFloat', 'ARM_SoftFP', 'ARM_Thumb',
+ 'assert', 'BigEndian', 'BSD', 'Cygwin', 'D_Coverage', 'D_Ddoc', 'D_HardFloat',
+ 'DigitalMars', 'D_InlineAsm_X86', 'D_InlineAsm_X86_64', 'D_LP64',
+ 'D_NoBoundsChecks', 'D_PIC', 'DragonFlyBSD', 'D_SIMD', 'D_SoftFloat',
+ 'D_Version2', 'D_X32', 'FreeBSD', 'GNU', 'Haiku', 'HPPA', 'HPPA64', 'Hurd',
+ 'IA64', 'LDC', 'linux', 'LittleEndian', 'MIPS32', 'MIPS64', 'MIPS_EABI',
+ 'MIPS_HardFloat', 'MIPS_N32', 'MIPS_N64', 'MIPS_O32', 'MIPS_O64',
+ 'MIPS_SoftFloat', 'NetBSD', 'none', 'OpenBSD', 'OSX', 'Posix', 'PPC', 'PPC64',
+ 'PPC_HardFloat', 'PPC_SoftFloat', 'S390', 'S390X', 'SDC', 'SH', 'SH64',
+ 'SkyOS', 'Solaris', 'SPARC', 'SPARC64', 'SPARC_HardFloat', 'SPARC_SoftFloat',
+ 'SPARC_V8Plus', 'SysV3', 'SysV4', 'unittest', 'Win32', 'Win64', 'Windows',
+ 'X86', 'X86_64'
+})
+
+local versions = token(lexer.KEYWORD, 'version') * lexer.space^0 *
+ token(lexer.OPERATOR, '(') * lexer.space^0 * versions_list
+
+local scopes = token(lexer.KEYWORD, 'scope') * lexer.space^0 *
+ token(lexer.OPERATOR, '(') * lexer.space^0 * scopes_list
+
+local traits = token(lexer.KEYWORD, '__traits') * lexer.space^0 *
+ token(lexer.OPERATOR, '(') * lexer.space^0 * traits_list
+
+local func = token(lexer.FUNCTION, lexer.word) *
+ #(lexer.space^0 * (P('!') * lexer.word^-1 * lexer.space^-1)^-1 *
+ P('('))
+
+M._rules = {
+ {'whitespace', ws},
+ {'class', class_sequence},
+ {'traits', traits},
+ {'versions', versions},
+ {'scopes', scopes},
+ {'keyword', keyword},
+ {'variable', properties},
+ {'type', type},
+ {'function', func},
+ {'constant', constant},
+ {'string', string},
+ {'identifier', identifier},
+ {'comment', comment},
+ {'number', number},
+ {'preproc', preproc},
+ {'operator', operator},
+ {'annotation', annotation},
+}
+
+M._tokenstyles = {
+ annotation = lexer.STYLE_PREPROCESSOR,
+ traits = 'fore:$(color.yellow)',
+ versions = lexer.STYLE_CONSTANT,
+ scopes = lexer.STYLE_CONSTANT
+}
+
+M._foldsymbols = {
+ _patterns = {'[{}]', '/[*+]', '[*+]/', '//'},
+ [lexer.OPERATOR] = {['{'] = 1, ['}'] = -1},
+ [lexer.COMMENT] = {
+ ['/*'] = 1, ['*/'] = -1, ['/+'] = 1, ['+/'] = -1,
+ ['//'] = lexer.fold_line_comments('//')
+ }
+}
+
+return M
diff --git a/lexlua/dockerfile.lua b/lexlua/dockerfile.lua
new file mode 100644
index 000000000..bec2c4341
--- /dev/null
+++ b/lexlua/dockerfile.lua
@@ -0,0 +1,41 @@
+-- Copyright 2016-2018 Alejandro Baez (https://keybase.io/baez). See License.txt.
+-- Dockerfile LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('dockerfile', {fold_by_indentation = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ ADD ARG CMD COPY ENTRYPOINT ENV EXPOSE FROM LABEL MAINTAINER ONBUILD RUN
+ STOPSIGNAL USER VOLUME WORKDIR
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Variable.
+lex:add_rule('variable', token(lexer.VARIABLE,
+ S('$')^1 * (S('{')^1 * lexer.word * S('}')^1 +
+ lexer.word)))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", false, true)
+local dq_str = lexer.delimited_range('"')
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('\\[],=:{}')))
+
+return lex
diff --git a/lexlua/dot.lua b/lexlua/dot.lua
new file mode 100644
index 000000000..54d55a458
--- /dev/null
+++ b/lexlua/dot.lua
@@ -0,0 +1,54 @@
+-- Copyright 2006-2018 Brian "Sir Alaran" Schott. See License.txt.
+-- Dot LPeg lexer.
+-- Based on lexer code by Mitchell.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('dot')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ graph node edge digraph fontsize rankdir fontname shape label arrowhead
+ arrowtail arrowsize color comment constraint decorate dir headlabel headport
+ headURL labelangle labeldistance labelfloat labelfontcolor labelfontname
+ labelfontsize layer lhead ltail minlen samehead sametail style taillabel
+ tailport tailURL weight subgraph
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ box polygon ellipse circle point egg triangle plaintext diamond trapezium
+ parallelogram house pentagon hexagon septagon octagon doublecircle
+ doubleoctagon tripleoctagon invtriangle invtrapezium invhouse Mdiamond Msquare
+ Mcircle rect rectangle none note tab folder box3d record
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+-- Try floats before bare integers so the fractional part is not left behind.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.digit^1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('->()[]{};')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/eiffel.lua b/lexlua/eiffel.lua
new file mode 100644
index 000000000..64ccd8bf3
--- /dev/null
+++ b/lexlua/eiffel.lua
@@ -0,0 +1,60 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Eiffel LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('eiffel')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ alias all and as check class creation debug deferred do else elseif end ensure
+ expanded export external feature from frozen if implies indexing infix inherit
+ inspect invariant is like local loop not obsolete old once or prefix redefine
+ rename require rescue retry select separate then undefine until variant when
+ xor
+ current false precursor result strip true unique void
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ character string bit boolean integer real none any
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '--' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*%&|^~.,:;?()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'check', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'debug', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'deferred', function(text, pos, line, s)
+ return line:find('deferred%s+class') and 0 or 1
+end)
+lex:add_fold_point(lexer.KEYWORD, 'do', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'from', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'inspect', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'once', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'class', function(text, pos, line, s)
+ return line:find('deferred%s+class') and 0 or 1
+end)
+lex:add_fold_point(lexer.COMMENT, '--', lexer.fold_line_comments('--'))
+
+return lex
diff --git a/lexlua/elixir.lua b/lexlua/elixir.lua
new file mode 100644
index 000000000..56bf4ec40
--- /dev/null
+++ b/lexlua/elixir.lua
@@ -0,0 +1,107 @@
+-- Copyright 2015-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Contributed by Richard Philips.
+-- Elixir LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local B, P, R, S = lpeg.B, lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('elixir', {fold_by_indentation = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Sigils.
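+-- Uppercase sigils (~C ~R ~S ~W) do not process escape sequences; lowercase
+-- ones do. A separate pattern handles each supported delimiter pair, plus the
+-- heredoc-style ''' and """ forms, followed by optional modifier letters.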
+local sigil11 = P("~") * S("CRSW") * lexer.delimited_range('<>', false, true)
+local sigil12 = P("~") * S("CRSW") * lexer.delimited_range('{}', false, true)
+local sigil13 = P("~") * S("CRSW") * lexer.delimited_range('[]', false, true)
+local sigil14 = P("~") * S("CRSW") * lexer.delimited_range('()', false, true)
+local sigil15 = P("~") * S("CRSW") * lexer.delimited_range('|', false, true)
+local sigil16 = P("~") * S("CRSW") * lexer.delimited_range('/', false, true)
+local sigil17 = P("~") * S("CRSW") * lexer.delimited_range('"', false, true)
+local sigil18 = P("~") * S("CRSW") * lexer.delimited_range("'", false, true)
+local sigil19 = P("~") * S("CRSW") * '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+local sigil10 = P("~") * S("CRSW") * "'''" * (lexer.any - "'''")^0 * P("'''")^-1
+local sigil21 = P("~") * S("crsw") * lexer.delimited_range('<>', false, false)
+local sigil22 = P("~") * S("crsw") * lexer.delimited_range('{}', false, false)
+local sigil23 = P("~") * S("crsw") * lexer.delimited_range('[]', false, false)
+local sigil24 = P("~") * S("crsw") * lexer.delimited_range('()', false, false)
+local sigil25 = P("~") * S("crsw") * lexer.delimited_range('|', false, false)
+local sigil26 = P("~") * S("crsw") * lexer.delimited_range('/', false, false)
+local sigil27 = P("~") * S("crsw") * lexer.delimited_range('"', false, false)
+local sigil28 = P("~") * S("crsw") * lexer.delimited_range("'", false, false)
+local sigil29 = P("~") * S("csrw") * '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+local sigil20 = P("~") * S("csrw") * "'''" * (lexer.any - "'''")^0 * P("'''")^-1
+local sigil_token = token(lexer.REGEX, sigil10 + sigil19 + sigil11 + sigil12 +
+ sigil13 + sigil14 + sigil15 + sigil16 +
+ sigil17 + sigil18 + sigil20 + sigil29 +
+ sigil21 + sigil22 + sigil23 + sigil24 +
+ sigil25 + sigil26 + sigil27 + sigil28)
+local sigiladdon_token = token(lexer.EMBEDDED, R('az', 'AZ')^0)
+lex:add_rule('sigil', sigil_token * sigiladdon_token)
+
+-- Atoms.
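+-- lpeg.B() is a backward assertion: the leading ':' of an atom must not be
+-- preceded by another ':', so the '::' operator is never matched as an atom.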
+local atom1 = B(1 - P(':')) * P(':') * lexer.delimited_range('"', false)
+local atom2 = B(1 - P(':')) * P(':') * R('az', 'AZ') *
+ R('az', 'AZ', '__', '@@', '09')^0 * S('?!')^-1
+local atom3 = B(1 - R('az', 'AZ', '__', '09', '::')) *
+ R('AZ') * R('az', 'AZ', '__', '@@', '09')^0 * S('?!')^-1
+lex:add_rule('atom', token(lexer.CONSTANT, atom1 + atom2 + atom3))
+
+-- Strings.
+local dq_str = lexer.delimited_range('"', false)
+local triple_dq_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+lex:add_rule('string', token(lexer.STRING, triple_dq_str + dq_str))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline_esc^0))
+
+-- Attributes.
+lex:add_rule('attribute', token(lexer.LABEL, B(1 - R('az', 'AZ', '__')) *
+ P('@') * R('az','AZ') *
+ R('az','AZ','09','__')^0))
+
+-- Booleans.
+lex:add_rule('boolean', token(lexer.NUMBER, P(':')^-1 *
+ word_match[[true false nil]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ defstruct defrecordp defrecord defprotocol defp defoverridable defmodule
+ defmacrop defmacro defimpl defexception defdelegate defcallback def
+]]))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ is_atom is_binary is_bitstring is_boolean is_float is_function is_integer
+ is_list is_map is_number is_pid is_port is_record is_reference is_tuple
+ is_exception case when cond for if unless try receive send exit raise throw
+ after rescue catch else do end quote unquote super import require alias use
+ self with fn
+]]))
+
+-- Operators.
+local operator1 = word_match[[and or not when xor in]]
+local operator2 = P('!==') + '!=' + '!' + '=~' + '===' + '==' + '=' + '<<<' +
+ '<<' + '<=' + '<-' + '<' + '>>>' + '>>' + '>=' + '>' + '->' +
+ '--' + '-' + '++' + '+' + '&&&' + '&&' + '&' + '|||' + '||' +
+ '|>' + '|' + '..' + '.' + '^^^' + '^' + '\\\\' + '::' + '*' +
+ '/' + '~~~' + '@'
+lex:add_rule('operator', token(lexer.OPERATOR, operator1 + operator2))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, R('az', '__') *
+ R('az', 'AZ', '__', '09')^0 *
+ S('?!')^-1))
+
+-- Numbers.
+local dec = lexer.digit * (lexer.digit + P("_"))^0
+local bin = '0b' * S('01')^1
+local oct = '0o' * R('07')^1
+local integer = bin + lexer.hex_num + oct + dec
+local float = lexer.digit^1 * P(".") * lexer.digit^1 *
+              (S("eE") * S('+-')^-1 * lexer.digit^1)^-1
+lex:add_rule('number', B(1 - R('az', 'AZ', '__')) * S('+-')^-1 *
+ token(lexer.NUMBER, float + integer))
+
+return lex
diff --git a/lexlua/erlang.lua b/lexlua/erlang.lua
new file mode 100644
index 000000000..973601ab2
--- /dev/null
+++ b/lexlua/erlang.lua
@@ -0,0 +1,89 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Erlang LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('erlang')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ after begin case catch cond end fun if let of query receive try when
+ -- Operators.
+ div rem or xor bor bxor bsl bsr and band not bnot badarg nocookie orelse
+ andalso false true
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ abs alive apply atom_to_list binary_to_list binary_to_term concat_binary date
+ disconnect_node element erase exit float float_to_list get get_keys
+ group_leader halt hd integer_to_list is_alive is_record length link
+ list_to_atom list_to_binary list_to_float list_to_integer list_to_pid
+ list_to_tuple load_module make_ref monitor_node node nodes now open_port
+ pid_to_list process_flag process_info process put register registered round
+ self setelement size spawn spawn_link split_binary statistics term_to_binary
+ throw time tl trunc tuple_to_list unlink unregister whereis
+ -- Others.
+ any atom binary bitstring byte constant function integer list map mfa
+ non_neg_integer number pid ports port_close port_info pos_integer reference
+ record
+ -- Erlang.
+ check_process_code delete_module get_cookie hash math module_loaded preloaded
+ processes purge_module set_cookie set_node
+ -- Math.
+ acos asin atan atan2 cos cosh exp log log10 min max pi pow power sin sinh sqrt
+ tan tanh
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.lower *
+ ('_' + lexer.alnum)^0))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, P('_')^0 * lexer.upper *
+ ('_' + lexer.alnum)^0))
+
+-- Directives.
+lex:add_rule('directive', token('directive', '-' * word_match[[
+ author behaviour behavior compile copyright define doc else endif export file
+ ifdef ifndef import include include_lib module record spec type undef
+]]))
+lex:add_style('directive', lexer.STYLE_PREPROCESSOR)
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"') +
+ '$' * lexer.any * lexer.alnum^0))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '%' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('-<>.;=/|+*:,!()[]{}')))
+
+-- Preprocessor.
+lex:add_rule('preprocessor', token(lexer.TYPE, '?' * lexer.word))
+
+-- Records.
+lex:add_rule('type', token(lexer.TYPE, '#' * lexer.word))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'case', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'fun', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'query', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'receive', 'end')
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '%', lexer.fold_line_comments('%'))
+
+return lex
diff --git a/lexlua/faust.lua b/lexlua/faust.lua
new file mode 100644
index 000000000..2865a633c
--- /dev/null
+++ b/lexlua/faust.lua
@@ -0,0 +1,47 @@
+-- Copyright 2015-2018 David B. Lamkins <david@lamkins.net>. See License.txt.
+-- Faust LPeg lexer, see http://faust.grame.fr/
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('faust')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ declare import mdoctags dependencies distributed inputs outputs par seq sum
+ prod xor with environment library component ffunction fvariable fconstant int
+ float case waveform h: v: t:
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"', true)))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+local int = R('09')^1
+local rad = P('.')
+local exp = (P('e') * S('+-')^-1 * int)^-1
+local flt = int * (rad * int)^-1 * exp + int^-1 * rad * int * exp
+lex:add_rule('number', token(lexer.NUMBER, flt + int))
+
+-- Pragmas.
+lex:add_rule('pragma', token(lexer.PREPROCESSOR, P('<mdoc>') *
+ (lexer.any - P('</mdoc>'))^0 *
+ P('</mdoc>')^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR,
+ S('+-/*%<>~!=^&|?~:;,.()[]{}@#$`\\\'')))
+
+return lex
diff --git a/lexlua/fish.lua b/lexlua/fish.lua
new file mode 100644
index 000000000..5bc27879f
--- /dev/null
+++ b/lexlua/fish.lua
@@ -0,0 +1,58 @@
+-- Copyright 2015-2018 Jason Schindler. See License.txt.
+-- Fish (http://fishshell.com/) script LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('fish')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ alias and begin bg bind block break breakpoint builtin case cd command
+ commandline complete contains continue count dirh dirs echo else emit end eval
+ exec exit fg fish fish_config fishd fish_indent fish_pager fish_prompt
+ fish_right_prompt fish_update_completions for funced funcsave function
+ functions help history if in isatty jobs math mimedb nextd not open or popd
+ prevd psub pushd pwd random read return set set_color source status switch
+ test trap type ulimit umask vared while
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE,
+ '$' * (lexer.word +
+ lexer.delimited_range('{}', true, true))))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", false, true)
+local dq_str = lexer.delimited_range('"')
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Shebang.
+lex:add_rule('shebang', token('shebang', '#!/' * lexer.nonnewline^0))
+lex:add_style('shebang', lexer.STYLE_LABEL)
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^&|~.,:;?()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'begin', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'for', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'function', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'switch', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'while', 'end')
+
+return lex
diff --git a/lexlua/forth.lua b/lexlua/forth.lua
new file mode 100644
index 000000000..086ce780c
--- /dev/null
+++ b/lexlua/forth.lua
@@ -0,0 +1,56 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Forth LPeg lexer.
+-- Contributions from Joseph Eib.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('forth')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Strings.
+local c_str = 'c' * lexer.delimited_range('"', true, true)
+local s_str = 's' * lexer.delimited_range('"', true, true)
+local s_bs_str = 's\\' * lexer.delimited_range('"', true, false)
+local dot_str = '.' * lexer.delimited_range('"', true, true)
+local dot_paren_str = '.' * lexer.delimited_range('()', true, true, false)
+local abort_str = 'abort' * lexer.delimited_range('"', true, true)
+lex:add_rule('string', token(lexer.STRING, c_str + s_str + s_bs_str + dot_str +
+ dot_paren_str + abort_str))
+
+-- Keywords.
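+-- The second argument to word_match() requests case-insensitive matching,
+-- since standard Forth words are case-insensitive.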
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ #> #s */ */mod +loop , . .r /mod 0< 0<> 0> 0= 1+ 1- 2! 2* 2/ 2>r 2@ 2drop 2dup
+ 2over 2r> 2r@ 2swap :noname <# <> >body >in >number >r ?do ?dup @ abort abs
+ accept action-of again align aligned allot and base begin bl buffer: c! c, c@
+  case cell+ cells char char+ chars compile, constant count cr create decimal
+ defer defer! defer@ depth do does> drop dup else emit endcase endof
+ environment? erase evaluate execute exit false fill find fm/mod here hex hold
+ holds i if immediate invert is j key leave literal loop lshift m* marker max
+ min mod move negate nip of or over pad parse parse-name pick postpone quit r>
+ r@ recurse refill restore-input roll rot rshift s>d save-input sign sm/rem
+ source source-id space spaces state swap to then true tuck type u. u.r u> u<
+ um* um/mod unloop until unused value variable while within word xor ['] [char]
+ [compile]
+]], true))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alnum +
+ S('+-*=<>.?/\'%,_$#'))^1))
+
+-- Comments.
+local line_comment = S('|\\') * lexer.nonnewline^0
+local block_comment = '(' * (lexer.any - ')')^0 * P(')')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, P('-')^-1 * lexer.digit^1 *
+ (S('./') * lexer.digit^1)^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S(':;<>+*-/[]#')))
+
+return lex
diff --git a/lexlua/fortran.lua b/lexlua/fortran.lua
new file mode 100644
index 000000000..98f2e7b38
--- /dev/null
+++ b/lexlua/fortran.lua
@@ -0,0 +1,72 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Fortran LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('fortran')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+local c_comment = lexer.starts_line(S('Cc')) * lexer.nonnewline^0
+local d_comment = lexer.starts_line(S('Dd')) * lexer.nonnewline^0
+local ex_comment = lexer.starts_line('!') * lexer.nonnewline^0
+local ast_comment = lexer.starts_line('*') * lexer.nonnewline^0
+local line_comment = '!' * lexer.nonnewline^0
+lex:add_rule('comment', token(lexer.COMMENT, c_comment + d_comment +
+ ex_comment + ast_comment +
+ line_comment))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ include program module subroutine function contains use call return
+ -- Statements.
+ case select default continue cycle do while else if elseif then elsewhere end
+ endif enddo forall where exit goto pause stop
+ -- Operators.
+ .not. .and. .or. .xor. .eqv. .neqv. .eq. .ne. .gt. .ge. .lt. .le.
+ -- Logical.
+ .false. .true.
+]], true)))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match([[
+ -- I/O.
+ backspace close endfile inquire open print read rewind write format
+ -- Type conversion utility and math.
+ aimag aint amax0 amin0 anint ceiling cmplx conjg dble dcmplx dfloat dim dprod
+ float floor ifix imag int logical modulo nint real sign sngl transfer zext abs
+ acos aimag aint alog alog10 amax0 amax1 amin0 amin1 amod anint asin atan atan2
+ cabs ccos char clog cmplx conjg cos cosh csin csqrt dabs dacos dasin datan
+ datan2 dble dcos dcosh ddim dexp dim dint dlog dlog10 dmax1 dmin1 dmod dnint
+ dprod dreal dsign dsin dsinh dsqrt dtan dtanh exp float iabs ichar idim idint
+ idnint ifix index int isign len lge lgt lle llt log log10 max max0 max1 min
+ min0 min1 mod nint real sign sin sinh sngl sqrt tan tanh
+]], true)))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match([[
+ implicit explicit none data parameter allocate allocatable allocated
+ deallocate integer real double precision complex logical character dimension
+ kind
+]], true)))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ -lexer.alpha))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.alnum^1))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", true, true)
+local dq_str = lexer.delimited_range('"', true, true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('<>=&+-/*,()')))
+
+return lex
diff --git a/lexlua/fsharp.lua b/lexlua/fsharp.lua
new file mode 100644
index 000000000..b651d5fe5
--- /dev/null
+++ b/lexlua/fsharp.lua
@@ -0,0 +1,59 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- F# LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('fsharp', {fold_by_indentation = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abstract and as assert asr begin class default delegate do done downcast
+  downto else end enum exception false finally for fun function if in inherit
+ interface land lazy let lor lsl lsr lxor match member mod module mutable
+ namespace new null of open or override sig static struct then to true try type
+ val when inline upcast while with async atomic break checked component const
+ constructor continue eager event external fixed functor include method mixin
+ process property protected public pure readonly return sealed switch virtual
+ void volatile where
+ -- Booleans.
+ true false
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ bool byte sbyte int16 uint16 int uint32 int64 uint64 nativeint unativeint char
+ string decimal unit void float32 single float double
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '//' * lexer.nonnewline^0 +
+ lexer.nested_pair('(*', '*)')))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float +
+ lexer.integer * S('uUlL')^-1)))
+
+-- Preprocessor.
+local preproc_word = word_match[[
+ else endif endregion if ifdef ifndef light region
+]]
+lex:add_rule('preproc', token(lexer.PREPROCESSOR, lexer.starts_line('#') *
+ S('\t ')^0 * preproc_word))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR,
+ S('=<>+-*/^.,:;~!@#%^&|?[](){}')))
+
+return lex
diff --git a/lexlua/gap.lua b/lexlua/gap.lua
new file mode 100644
index 000000000..de7c38cf4
--- /dev/null
+++ b/lexlua/gap.lua
@@ -0,0 +1,42 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Gap LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('gap')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ and break continue do elif else end fail false fi for function if in infinity
+ local not od or rec repeat return then true until while
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.digit^1 * -lexer.alpha))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('*+-,./:;<=>~^#()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'function', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'do', 'od')
+lex:add_fold_point(lexer.KEYWORD, 'if', 'fi')
+lex:add_fold_point(lexer.KEYWORD, 'repeat', 'until')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
diff --git a/lexlua/gettext.lua b/lexlua/gettext.lua
new file mode 100644
index 000000000..5ff8c14d4
--- /dev/null
+++ b/lexlua/gettext.lua
@@ -0,0 +1,31 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Gettext LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('gettext')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ msgid msgid_plural msgstr fuzzy c-format no-c-format
+]], true)))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, S('%$@') * lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"', true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * S(': .~') *
+ lexer.nonnewline^0))
+
+return lex
diff --git a/lexlua/gherkin.lua b/lexlua/gherkin.lua
new file mode 100644
index 000000000..f4a7924db
--- /dev/null
+++ b/lexlua/gherkin.lua
@@ -0,0 +1,41 @@
+-- Copyright 2015-2018 Jason Schindler. See License.txt.
+-- Gherkin (https://github.com/cucumber/cucumber/wiki/Gherkin) LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('gherkin', {fold_by_indentation = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ And Background But Examples Feature Given Outline Scenario Scenarios Then When
+]]))
+
+-- Strings.
+local doc_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+local dq_str = lexer.delimited_range('"')
+lex:add_rule('string', token(lexer.STRING, doc_str + dq_str))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Tags.
+lex:add_rule('tag', token('tag', '@' * lexer.word^0))
+lex:add_style('tag', lexer.STYLE_LABEL)
+
+-- Placeholders.
+lex:add_rule('placeholder', token('placeholder', lexer.nested_pair('<', '>')))
+lex:add_style('placeholder', lexer.STYLE_VARIABLE)
+
+-- Examples.
+lex:add_rule('example', token('example', '|' * lexer.nonnewline^0))
+lex:add_style('example', lexer.STYLE_NUMBER)
+
+return lex
diff --git a/lexlua/glsl.lua b/lexlua/glsl.lua
new file mode 100644
index 000000000..497d81636
--- /dev/null
+++ b/lexlua/glsl.lua
@@ -0,0 +1,109 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- GLSL LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('glsl', {inherit = lexer.load('cpp')})
+
+-- Whitespace.
+lex:modify_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:modify_rule('keyword', token(lexer.KEYWORD, word_match[[
+ attribute const in inout out uniform varying invariant centroid flat smooth
+ noperspective layout patch sample subroutine lowp mediump highp precision
+ -- Macros.
+ __VERSION__ __LINE__ __FILE__
+]]) + lex:get_rule('keyword'))
+
+-- Types.
+lex:modify_rule('type',
+       token(lexer.TYPE,
+         S('bdiu')^-1 * 'vec' * R('24') +
+                 P('d')^-1 * 'mat' * R('24') * ('x' * R('24'))^-1 +
+ S('iu')^-1 * 'sampler' * R('13') * 'D' +
+ 'sampler' * R('12') * 'D' * P('Array')^-1 * 'Shadow' +
+ S('iu')^-1 * 'sampler' * (R('12') * 'DArray' +
+ word_match[[
+ Cube 2DRect Buffer 2DMS 2DMSArray 2DMSCubeArray
+ ]]) +
+ word_match[[
+ samplerCubeShadow sampler2DRectShadow
+ samplerCubeArrayShadow
+ ]]) +
+ lex:get_rule('type') +
+
+-- Functions.
+token(lexer.FUNCTION, word_match[[
+ radians degrees sin cos tan asin acos atan sinh cosh tanh asinh acosh atanh
+ pow exp log exp2 log2 sqrt inversesqrt abs sign floor trunc round roundEven
+ ceil fract mod modf min max clamp mix step smoothstep isnan isinf
+ floatBitsToInt floatBitsToUint intBitsToFloat uintBitsToFloat fma frexp ldexp
+ packUnorm2x16 packUnorm4x8 packSnorm4x8 unpackUnorm2x16 unpackUnorm4x8
+ unpackSnorm4x8 packDouble2x32 unpackDouble2x32 length distance dot cross
+ normalize ftransform faceforward reflect refract matrixCompMult outerProduct
+ transpose determinant inverse lessThan lessThanEqual greaterThan
+ greaterThanEqual equal notEqual any all not uaddCarry usubBorrow umulExtended
+  imulExtended bitfieldExtract bitfieldInsert bitfieldReverse bitCount findLSB
+ findMSB textureSize textureQueryLOD texture textureProj textureLod
+ textureOffset texelFetch texelFetchOffset textureProjOffset textureLodOffset
+ textureProjLod textureProjLodOffset textureGrad textureGradOffset
+ textureProjGrad textureProjGradOffset textureGather textureGatherOffset
+ texture1D texture2D texture3D texture1DProj texture2DProj texture3DProj
+ texture1DLod texture2DLod texture3DLod texture1DProjLod texture2DProjLod
+ texture3DProjLod textureCube textureCubeLod shadow1D shadow2D shadow1DProj
+ shadow2DProj shadow1DLod shadow2DLod shadow1DProjLod shadow2DProjLod dFdx dFdy
+ fwidth interpolateAtCentroid interpolateAtSample interpolateAtOffset noise1
+ noise2 noise3 noise4 EmitStreamVertex EndStreamPrimitive EmitVertex
+ EndPrimitive barrier
+]]) +
+
+-- Variables.
+token(lexer.VARIABLE, word_match[[
+ gl_VertexID gl_InstanceID gl_Position gl_PointSize gl_ClipDistance
+ gl_PrimitiveIDIn gl_InvocationID gl_PrimitiveID gl_Layer gl_PatchVerticesIn
+ gl_TessLevelOuter gl_TessLevelInner gl_TessCoord gl_FragCoord gl_FrontFacing
+ gl_PointCoord gl_SampleID gl_SamplePosition gl_FragColor gl_FragData
+ gl_FragDepth gl_SampleMask gl_ClipVertex gl_FrontColor gl_BackColor
+ gl_FrontSecondaryColor gl_BackSecondaryColor gl_TexCoord gl_FogFragCoord
+ gl_Color gl_SecondaryColor gl_Normal gl_Vertex gl_MultiTexCoord0
+ gl_MultiTexCoord1 gl_MultiTexCoord2 gl_MultiTexCoord3 gl_MultiTexCoord4
+ gl_MultiTexCoord5 gl_MultiTexCoord6 gl_MultiTexCoord7 gl_FogCoord
+]]) +
+
+-- Constants.
+token(lexer.CONSTANT, word_match[[
+ gl_MaxVertexAttribs gl_MaxVertexUniformComponents gl_MaxVaryingFloats
+ gl_MaxVaryingComponents gl_MaxVertexOutputComponents
+ gl_MaxGeometryInputComponents gl_MaxGeometryOutputComponents
+ gl_MaxFragmentInputComponents gl_MaxVertexTextureImageUnits
+ gl_MaxCombinedTextureImageUnits gl_MaxTextureImageUnits
+ gl_MaxFragmentUniformComponents gl_MaxDrawBuffers gl_MaxClipDistances
+ gl_MaxGeometryTextureImageUnits gl_MaxGeometryOutputVertices
+ gl_MaxGeometryTotalOutputComponents gl_MaxGeometryUniformComponents
+ gl_MaxGeometryVaryingComponents gl_MaxTessControlInputComponents
+ gl_MaxTessControlOutputComponents gl_MaxTessControlTextureImageUnits
+ gl_MaxTessControlUniformComponents gl_MaxTessControlTotalOutputComponents
+ gl_MaxTessEvaluationInputComponents gl_MaxTessEvaluationOutputComponents
+ gl_MaxTessEvaluationTextureImageUnits gl_MaxTessEvaluationUniformComponents
+ gl_MaxTessPatchComponents gl_MaxPatchVertices gl_MaxTessGenLevel
+ gl_MaxTextureUnits gl_MaxTextureCoords gl_MaxClipPlanes
+
+ gl_DepthRange gl_ModelViewMatrix gl_ProjectionMatrix
+ gl_ModelViewProjectionMatrix gl_TextureMatrix gl_NormalMatrix
+ gl_ModelViewMatrixInverse gl_ProjectionMatrixInverse
+ gl_ModelViewProjectionMatrixInverse gl_TextureMatrixInverse
+ gl_ModelViewMatrixTranspose gl_ProjectionMatrixTranspose
+ gl_ModelViewProjectionMatrixTranspose gl_TextureMatrixTranspose
+ gl_ModelViewMatrixInverseTranspose gl_ProjectionMatrixInverseTranspose
+ gl_ModelViewProjectionMatrixInverseTranspose gl_TextureMatrixInverseTranspose
+ gl_NormalScale gl_ClipPlane gl_Point gl_FrontMaterial gl_BackMaterial
+ gl_LightSource gl_LightModel gl_FrontLightModelProduct
+ gl_BackLightModelProduct gl_FrontLightProduct gl_BackLightProduct
+ gl_TextureEnvColor gl_EyePlaneS gl_EyePlaneT gl_EyePlaneR gl_EyePlaneQ
+ gl_ObjectPlaneS gl_ObjectPlaneT gl_ObjectPlaneR gl_ObjectPlaneQ gl_Fog
+]]))
+
+return lex
diff --git a/lexlua/gnuplot.lua b/lexlua/gnuplot.lua
new file mode 100644
index 000000000..c33854d20
--- /dev/null
+++ b/lexlua/gnuplot.lua
@@ -0,0 +1,59 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Gnuplot LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('gnuplot')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ cd call clear exit fit help history if load pause plot using with index every
+ smooth thru print pwd quit replot reread reset save set show unset shell splot
+  system test update
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ abs acos acosh arg asin asinh atan atan2 atanh besj0 besj1 besy0 besy1 ceil
+ cos cosh erf erfc exp floor gamma ibeta inverf igamma imag invnorm int
+ lambertw lgamma log log10 norm rand real sgn sin sinh sqrt tan tanh column
+ defined tm_hour tm_mday tm_min tm_mon tm_sec tm_wday tm_yday tm_year valid
+]]))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, word_match[[
+ angles arrow autoscale bars bmargin border boxwidth clabel clip cntrparam
+ colorbox contour datafile decimalsign dgrid3d dummy encoding fit fontpath
+ format functions function grid hidden3d historysize isosamples key label
+ lmargin loadpath locale logscale mapping margin mouse multiplot mx2tics mxtics
+ my2tics mytics mztics offsets origin output parametric plot pm3d palette
+ pointsize polar print rmargin rrange samples size style surface terminal tics
+ ticslevel ticscale timestamp timefmt title tmargin trange urange variables
+ version view vrange x2data x2dtics x2label x2mtics x2range x2tics x2zeroaxis
+ xdata xdtics xlabel xmtics xrange xtics xzeroaxis y2data y2dtics y2label
+ y2mtics y2range y2tics y2zeroaxis ydata ydtics ylabel ymtics yrange ytics
+ yzeroaxis zdata zdtics cbdata cbdtics zero zeroaxis zlabel zmtics zrange ztics
+ cblabel cbmtics cbrange cbtics
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"') +
+ lexer.delimited_range('[]', true) +
+ lexer.delimited_range('{}', true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('-+~!$*%=<>&|^?:()')))
+
+return lex
diff --git a/lexlua/go.lua b/lexlua/go.lua
new file mode 100644
index 000000000..5fff60938
--- /dev/null
+++ b/lexlua/go.lua
@@ -0,0 +1,62 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Go LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('go')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ break case chan const continue default defer else fallthrough for func go goto
+ if import interface map package range return select struct switch type var
+]]))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ true false iota nil
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ bool byte complex64 complex128 error float32 float64 int int8 int16 int32
+ int64 rune string uint uint8 uint16 uint32 uint64 uintptr
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ append cap close complex copy delete imag len make new panic print println
+ real recover
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", true)
+local dq_str = lexer.delimited_range('"', true)
+local raw_str = lexer.delimited_range('`', false, true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + raw_str))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ P('i')^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/%&|^<>=!:;.,()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/groovy.lua b/lexlua/groovy.lua
new file mode 100644
index 000000000..d27ea7132
--- /dev/null
+++ b/lexlua/groovy.lua
@@ -0,0 +1,69 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Groovy LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('groovy')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abstract break case catch continue default do else extends final finally for
+ if implements instanceof native new private protected public return static
+ switch synchronized throw throws transient try volatile while strictfp package
+ import as assert def mixin property test using in
+ false null super this true it
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ abs any append asList asWritable call collect compareTo count div dump each
+ eachByte eachFile eachLine every find findAll flatten getAt getErr getIn
+ getOut getText grep immutable inject inspect intersect invokeMethods isCase
+ join leftShift minus multiply newInputStream newOutputStream newPrintWriter
+ newReader newWriter next plus pop power previous print println push putAt read
+ readBytes readLines reverse reverseEach round size sort splitEachLine step
+ subMap times toInteger toList tokenize upto waitForOrKill withPrintWriter
+ withReader withStream withWriter withWriterAppend write writeLine
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ boolean byte char class double float int interface long short void
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'")
+local dq_str = lexer.delimited_range('"')
+local triple_sq_str = "'''" * (lexer.any - "'''")^0 * P("'''")^-1
+local triple_dq_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+local regex_str = #P('/') * lexer.last_char_includes('=~|!<>+-*?&,:;([{') *
+ lexer.delimited_range('/', true)
+lex:add_rule('string', token(lexer.STRING, triple_sq_str + triple_dq_str +
+ sq_str + dq_str) +
+ token(lexer.REGEX, regex_str))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=~|!<>+-/*?&.,:;()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/gtkrc.lua b/lexlua/gtkrc.lua
new file mode 100644
index 000000000..6c6b23003
--- /dev/null
+++ b/lexlua/gtkrc.lua
@@ -0,0 +1,58 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Gtkrc LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('gtkrc')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ binding class include module_path pixmap_path im_module_file style widget
+ widget_class
+]]))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, word_match[[
+ bg fg base text xthickness ythickness bg_pixmap font fontset font_name stock
+ color engine
+]]))
+
+-- States.
+lex:add_rule('state', token('state', word_match[[
+ ACTIVE SELECTED NORMAL PRELIGHT INSENSITIVE TRUE FALSE
+]]))
+lex:add_style('state', lexer.STYLE_CONSTANT)
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ mix shade lighter darker
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.alpha *
+ (lexer.alnum + S('_-'))^0))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.digit^1 *
+ ('.' * lexer.digit^1)^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S(':=,*()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
diff --git a/lexlua/haskell.lua b/lexlua/haskell.lua
new file mode 100644
index 000000000..484069cb3
--- /dev/null
+++ b/lexlua/haskell.lua
@@ -0,0 +1,45 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Haskell LPeg lexer.
+-- Modified by Alex Suraci.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('haskell', {fold_by_indentation = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ case class data default deriving do else if import in infix infixl infixr
+ instance let module newtype of then type where _ as qualified hiding
+]]))
+
+local word = (lexer.alnum + S("._'#"))^0
+local op = lexer.punct - S('()[]{}')
+
+-- Types & type constructors.
+lex:add_rule('type', token(lexer.TYPE, (lexer.upper * word) +
+ (":" * (op^1 - ":"))))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"')))
+
+-- Comments.
+local line_comment = '--' * lexer.nonnewline_esc^0
+local block_comment = '{-' * (lexer.any - '-}')^0 * P('-}')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, op))
+
+return lex
diff --git a/lexlua/html.lua b/lexlua/html.lua
new file mode 100644
index 000000000..8c8d999c7
--- /dev/null
+++ b/lexlua/html.lua
@@ -0,0 +1,149 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- HTML LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local lex = lexer.new('html')
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+lex:add_rule('whitespace', ws)
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '<!--' * (lexer.any - '-->')^0 *
+ P('-->')^-1))
+
+-- Doctype.
+lex:add_rule('doctype', token('doctype', '<!' * word_match([[doctype]], true) *
+ (lexer.any - '>')^1 * '>'))
+lex:add_style('doctype', lexer.STYLE_COMMENT)
+
+-- Elements.
+local known_element = token('element', '<' * P('/')^-1 * word_match([[
+ a abbr address area article aside audio b base bdi bdo blockquote body br
+ button canvas caption cite code col colgroup content data datalist dd
+ decorator del details dfn div dl dt element em embed fieldset figcaption
+ figure footer form h1 h2 h3 h4 h5 h6 head header hr html i iframe img input
+ ins kbd keygen label legend li link main map mark menu menuitem meta meter nav
+ noscript object ol optgroup option output p param pre progress q rp rt ruby s
+ samp script section select shadow small source spacer span strong style sub
+ summary sup table tbody td template textarea tfoot th thead time title tr
+ track u ul var video wbr
+]], true))
+local unknown_element = token('unknown_element', '<' * P('/')^-1 * lexer.word)
+local element = known_element + unknown_element
+lex:add_rule('element', element)
+lex:add_style('element', lexer.STYLE_KEYWORD)
+lex:add_style('unknown_element', lexer.STYLE_KEYWORD..',italics')
+
+-- Closing tags.
+local tag_close = token('element', P('/')^-1 * '>')
+lex:add_rule('tag_close', tag_close)
+
+-- Attributes.
+local known_attribute = token('attribute', word_match([[
+ accept accept-charset accesskey action align alt async autocomplete autofocus
+ autoplay bgcolor border buffered challenge charset checked cite class code
+ codebase color cols colspan content contenteditable contextmenu controls
+ coords data data- datetime default defer dir dirname disabled download
+ draggable dropzone enctype for form headers height hidden high href hreflang
+ http-equiv icon id ismap itemprop keytype kind label lang language list loop
+ low manifest max maxlength media method min multiple name novalidate open
+ optimum pattern ping placeholder poster preload pubdate radiogroup readonly
+ rel required reversed role rows rowspan sandbox scope scoped seamless selected
+ shape size sizes span spellcheck src srcdoc srclang start step style summary
+ tabindex target title type usemap value width wrap
+]], true) + ((P('data-') + 'aria-') * (lexer.alnum + '-')^1))
+local unknown_attribute = token('unknown_attribute', lexer.word)
+local attribute = (known_attribute + unknown_attribute) * #(lexer.space^0 * '=')
+lex:add_rule('attribute', attribute)
+lex:add_style('attribute', lexer.STYLE_TYPE)
+lex:add_style('unknown_attribute', lexer.STYLE_TYPE..',italics')
+
+-- TODO: performance is terrible on large files.
+local in_tag = P(function(input, index)
+ local before = input:sub(1, index - 1)
+ local s, e = before:find('<[^>]-$'), before:find('>[^<]-$')
+ if s and e then return s > e and index or nil end
+ if s then return index end
+ return input:find('^[^<]->', index) and index or nil
+end)
+
+-- Equals.
+local equals = token(lexer.OPERATOR, '=') --* in_tag
+--lex:add_rule('equals', equals)
+
+-- Strings.
+local string = #S('\'"') * lexer.last_char_includes('=') *
+ token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"'))
+lex:add_rule('string', string)
+
+-- Numbers.
+lex:add_rule('number', #lexer.digit * lexer.last_char_includes('=') *
+ token(lexer.NUMBER, lexer.digit^1 * P('%')^-1))--*in_tag)
+
+-- Entities.
+lex:add_rule('entity', token('entity', '&' * (lexer.any - lexer.space - ';')^1 *
+ ';'))
+lex:add_style('entity', lexer.STYLE_COMMENT)
+
+-- Fold points.
+local function disambiguate_lt(text, pos, line, s)
+ return not line:find('^</', s) and 1 or -1
+end
+lex:add_fold_point('element', '<', disambiguate_lt)
+lex:add_fold_point('element', '/>', -1)
+lex:add_fold_point('unknown_element', '<', disambiguate_lt)
+lex:add_fold_point('unknown_element', '/>', -1)
+lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
+
+-- Tags that start embedded languages.
+-- Export these patterns for proxy lexers (e.g. ASP) that need them.
+lex.embed_start_tag = element *
+ (ws * attribute * ws^-1 * equals * ws^-1 * string)^0 *
+ ws^-1 * tag_close
+lex.embed_end_tag = element * tag_close
+
+-- Embedded CSS (<style type="text/css"> ... </style>).
+local css = lexer.load('css')
+local style_element = word_match([[style]], true)
+local css_start_rule = #(P('<') * style_element *
+ ('>' + P(function(input, index)
+ if input:find('^%s+type%s*=%s*(["\'])text/css%1', index) then
+ return index
+ end
+end))) * lex.embed_start_tag
+local css_end_rule = #('</' * style_element * ws^-1 * '>') * lex.embed_end_tag
+lex:embed(css, css_start_rule, css_end_rule)
+
+-- Embedded JavaScript (<script type="text/javascript"> ... </script>).
+local js = lexer.load('javascript')
+local script_element = word_match([[script]], true)
+local js_start_rule = #(P('<') * script_element *
+ ('>' + P(function(input, index)
+ if input:find('^%s+type%s*=%s*(["\'])text/javascript%1', index) then
+ return index
+ end
+end))) * lex.embed_start_tag
+local js_end_rule = #('</' * script_element * ws^-1 * '>') * lex.embed_end_tag
+local js_line_comment = '//' * (lexer.nonnewline_esc - js_end_rule)^0
+local js_block_comment = '/*' * (lexer.any - '*/' - js_end_rule)^0 * P('*/')^-1
+js:modify_rule('comment', token(lexer.COMMENT, js_line_comment +
+ js_block_comment))
+lex:embed(js, js_start_rule, js_end_rule)
+
+-- Embedded CoffeeScript (<script type="text/coffeescript"> ... </script>).
+local cs = lexer.load('coffeescript')
+local script_element = word_match([[script]], true)
+local cs_start_rule = #(P('<') * script_element * P(function(input, index)
+ if input:find('^[^>]+type%s*=%s*(["\'])text/coffeescript%1', index) then
+ return index
+ end
+end)) * lex.embed_start_tag
+local cs_end_rule = #('</' * script_element * ws^-1 * '>') * lex.embed_end_tag
+lex:embed(cs, cs_start_rule, cs_end_rule)
+
+return lex
diff --git a/lexlua/html2.lua b/lexlua/html2.lua
new file mode 100644
index 000000000..ad1bd9c87
--- /dev/null
+++ b/lexlua/html2.lua
@@ -0,0 +1,147 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- HTML LPeg lexer.
+
+local l = require('lexer')
+local token, word_match = l.token, l.word_match
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local lexer = l.new('html')
+
+-- Whitespace.
+local ws = token(l.WHITESPACE, l.space^1)
+lexer:add_rule('whitespace', ws)
+
+-- Comments.
+lexer:add_rule('comment',
+ token(l.COMMENT, '<!--' * (l.any - '-->')^0 * P('-->')^-1))
+
+-- Doctype.
+lexer:add_rule('doctype', token('doctype', '<!' * word_match('doctype', true) *
+ (l.any - '>')^1 * '>'))
+lexer:add_style('doctype', l.STYLE_COMMENT)
+
+-- Elements.
+local known_element = token('element', '<' * P('/')^-1 * word_match([[
+ a abbr address area article aside audio b base bdi bdo blockquote body
+ br button canvas caption cite code col colgroup content data datalist dd
+ decorator del details dfn div dl dt element em embed fieldset figcaption
+ figure footer form h1 h2 h3 h4 h5 h6 head header hr html i iframe img input
+ ins kbd keygen label legend li link main map mark menu menuitem meta meter
+ nav noscript object ol optgroup option output p param pre progress q rp rt
+ ruby s samp script section select shadow small source spacer span strong
+ style sub summary sup table tbody td template textarea tfoot th thead time
+ title tr track u ul var video wbr
+]], true))
+lexer:add_style('element', l.STYLE_KEYWORD)
+local unknown_element = token('unknown_element', '<' * P('/')^-1 * l.word)
+lexer:add_style('unknown_element', l.STYLE_KEYWORD..',italics')
+local element = known_element + unknown_element
+lexer:add_rule('element', element)
+
+-- Closing tags.
+local tag_close = token('element', P('/')^-1 * '>')
+lexer:add_rule('tag_close', tag_close)
+
+-- Attributes.
+local known_attribute = token('attribute', word_match([[
+ accept accept-charset accesskey action align alt async autocomplete autofocus
+ autoplay bgcolor border buffered challenge charset checked cite class code
+ codebase color cols colspan content contenteditable contextmenu controls
+ coords data data- datetime default defer dir dirname disabled download
+ draggable dropzone enctype for form headers height hidden high href hreflang
+ http-equiv icon id ismap itemprop keytype kind label lang language list
+ loop low manifest max maxlength media method min multiple name novalidate
+ open optimum pattern ping placeholder poster preload pubdate radiogroup
+ readonly rel required reversed role rows rowspan sandbox scope scoped
+ seamless selected shape size sizes span spellcheck src srcdoc srclang
+ start step style summary tabindex target title type usemap value width wrap
+]], true) + ((P('data-') + 'aria-') * (l.alnum + '-')^1))
+lexer:add_style('attribute', l.STYLE_TYPE)
+local unknown_attribute = token('unknown_attribute', l.word)
+lexer:add_style('unknown_attribute', l.STYLE_TYPE..',italics')
+local attribute = (known_attribute + unknown_attribute) * #(l.space^0 * '=')
+lexer:add_rule('attribute', attribute)
+
+-- TODO: performance is terrible on large files.
+local in_tag = P(function(input, index)
+ local before = input:sub(1, index - 1)
+ local s, e = before:find('<[^>]-$'), before:find('>[^<]-$')
+ if s and e then return s > e and index or nil end
+ if s then return index end
+ return input:find('^[^<]->', index) and index or nil
+end)
+
+-- Equals.
+local equals = token(l.OPERATOR, '=') --* in_tag
+--lexer:add_rule('equals', equals)
+
+-- Strings.
+local sq_str = l.delimited_range("'")
+local dq_str = l.delimited_range('"')
+local string = #S('\'"') * l.last_char_includes('=') *
+ token(l.STRING, sq_str + dq_str)
+lexer:add_rule('string', string)
+
+-- Numbers.
+lexer:add_rule('number', #l.digit * l.last_char_includes('=') *
+ token(l.NUMBER, l.digit^1 * P('%')^-1)) --* in_tag)
+
+-- Entities.
+lexer:add_rule('entity', token('entity', '&' * (l.any - l.space - ';')^1 * ';'))
+lexer:add_style('entity', l.STYLE_COMMENT)
+
+-- Fold points.
+lexer:add_fold_point('element', '<', '</')
+lexer:add_fold_point('element', '<', '/>')
+lexer:add_fold_point('unknown_element', '<', '</')
+lexer:add_fold_point('unknown_element', '<', '/>')
+lexer:add_fold_point(l.COMMENT, '<!--', '-->')
+
+-- Tags that start embedded languages.
+lexer.embed_start_tag = element *
+ (ws * attribute * ws^-1 * equals * ws^-1 * string)^0 *
+ ws^-1 * tag_close
+lexer.embed_end_tag = element * tag_close
+
+-- Embedded CSS.
+local css = l.load('css')
+local style_element = word_match('style', true)
+local css_start_rule = #(P('<') * style_element *
+ ('>' + P(function(input, index)
+ if input:find('^%s+type%s*=%s*(["\'])text/css%1', index) then
+ return index
+ end
+end))) * lexer.embed_start_tag -- <style type="text/css">
+local css_end_rule = #('</' * style_element * ws^-1 * '>') *
+ lexer.embed_end_tag -- </style>
+lexer:embed(css, css_start_rule, css_end_rule)
+
+-- Embedded JavaScript.
+local js = l.load('javascript')
+local script_element = word_match('script', true)
+local js_start_rule = #(P('<') * script_element *
+ ('>' + P(function(input, index)
+ if input:find('^%s+type%s*=%s*(["\'])text/javascript%1', index) then
+ return index
+ end
+end))) * lexer.embed_start_tag -- <script type="text/javascript">
+local js_end_rule = #('</' * script_element * ws^-1 * '>') *
+ lexer.embed_end_tag -- </script>
+local js_line_comment = '//' * (l.nonnewline_esc - js_end_rule)^0
+local js_block_comment = '/*' * (l.any - '*/' - js_end_rule)^0 * P('*/')^-1
+js:modify_rule('comment', token(l.COMMENT, js_line_comment + js_block_comment))
+lexer:embed(js, js_start_rule, js_end_rule)
+
+-- Embedded CoffeeScript.
+local cs = l.load('coffeescript')
+local script_element = word_match('script', true)
+local cs_start_rule = #(P('<') * script_element * P(function(input, index)
+ if input:find('^[^>]+type%s*=%s*(["\'])text/coffeescript%1', index) then
+ return index
+ end
+end)) * lexer.embed_start_tag -- <script type="text/coffeescript">
+local cs_end_rule = #('</' * script_element * ws^-1 * '>') *
+ lexer.embed_end_tag -- </script>
+lexer:embed(cs, cs_start_rule, cs_end_rule)
+
+return lexer
diff --git a/lexlua/icon.lua b/lexlua/icon.lua
new file mode 100644
index 000000000..2759b3946
--- /dev/null
+++ b/lexlua/icon.lua
@@ -0,0 +1,61 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- LPeg lexer for the Icon programming language.
+-- http://www.cs.arizona.edu/icon
+-- Contributed by Carl Sturtivant.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('icon')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ break by case create default do else end every fail global if initial
+ invocable link local next not of procedure record repeat return static suspend
+ then to until while
+]]))
+
+-- Icon Keywords: unique to Icon.
+lex:add_rule('special_keyword', token('special_keyword', P('&') * word_match[[
+ allocated ascii clock collections cset current date dateline digits dump e
+ error errornumber errortext errorvalue errout fail features file host input
+ lcase letters level line main null output phi pi pos progname random regions
+ source storage subject time trace ucase version
+]]))
+lex:add_style('special_keyword', lexer.STYLE_TYPE)
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline_esc^0))
+
+-- Numbers.
+local radix_literal = P('-')^-1 * lexer.dec_num * S('rR') * lexer.alnum^1
+lex:add_rule('number', token(lexer.NUMBER, radix_literal + lexer.float +
+ lexer.integer))
+
+-- Preprocessor.
+local preproc_word = word_match[[
+ define else endif error ifdef ifndef include line undef
+]]
+lex:add_rule('preproc', token(lexer.PREPROCESSOR, P('$') * preproc_word))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>~!=^&|?~@:;,.()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif')
+lex:add_fold_point(lexer.KEYWORD, 'procedure', 'end')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
diff --git a/lexlua/idl.lua b/lexlua/idl.lua
new file mode 100644
index 000000000..cf2b6c8aa
--- /dev/null
+++ b/lexlua/idl.lua
@@ -0,0 +1,51 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- IDL LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('idl')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abstract attribute case const context custom default enum exception factory
+ FALSE in inout interface local module native oneway out private public raises
+  readonly struct supports switch TRUE truncatable typedef union valuetype
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ any boolean char double fixed float long Object octet sequence short string
+ unsigned ValueBase void wchar wstring
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Preprocessor.
+local preproc_word = word_match[[
+ define undef ifdef ifndef if elif else endif include warning pragma
+]]
+lex:add_rule('preproc', token(lexer.PREPROCESSOR, lexer.starts_line('#') *
+ preproc_word))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('!<>=+-/*%&|^~.,:;?()[]{}')))
+
+return lex
diff --git a/lexlua/inform.lua b/lexlua/inform.lua
new file mode 100644
index 000000000..59f9a26b0
--- /dev/null
+++ b/lexlua/inform.lua
@@ -0,0 +1,72 @@
+-- Copyright 2010-2018 Jeff Stone. See License.txt.
+-- Inform LPeg lexer for Scintilla.
+-- JMS 2010-04-25.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('inform')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ Abbreviate Array Attribute Class Constant Default End Endif Extend Global
+ Ifdef Iffalse Ifndef Ifnot Iftrue Import Include Link Lowstring Message Object
+ Property Release Replace Serial StartDaemon Statusline StopDaemon Switches
+ Verb
+ absent action actor add_to_scope address additive after and animate article
+ articles before bold box break cant_go capacity char class child children
+ clothing concealed container continue creature daemon deadflag default
+ describe description do door door_dir door_to d_to d_obj e_to e_obj each_turn
+ edible else enterable false female first font for found_in general give
+ grammar has hasnt held if in in_to in_obj initial inside_description invent
+ jump last life light list_together location lockable locked male move moved
+ multi multiexcept multiheld multiinside n_to n_obj ne_to ne_obj nw_to nw_obj
+ name neuter new_line nothing notin noun number objectloop ofclass off on only
+ open openable or orders out_to out_obj parent parse_name player plural
+ pluralname print print_ret private proper provides random react_after
+  react_before remove replace return reverse rfalse roman rtrue s_to s_obj
+  se_to
+  se_obj sw_to sw_obj scenery scope score scored second self short_name
+ short_name_indef sibling spaces static string style supporter switch
+ switchable talkable thedark time_left time_out to topic transparent true
+ underline u_to u_obj visited w_to w_obj when_closed when_off when_on when_open
+ while with with_key workflag worn
+]]))
+
+-- Library actions.
+lex:add_rule('action', token('action', word_match[[
+ Answer Ask AskFor Attack Blow Burn Buy Climb Close Consult Cut Dig Disrobe
+ Drink Drop Eat Empty EmptyT Enter Examine Exit Fill FullScore GetOff Give Go
+ GoIn Insert Inv InvTall InvWide Jump JumpOver Kiss LetGo Listen LMode1 LMode2
+ LMode3 Lock Look LookUnder Mild No NotifyOff NotifyOn Objects Open Order
+ Places Pray Pronouns Pull Push PushDir PutOn Quit Receive Remove Restart
+ Restore Rub Save Score ScriptOff ScriptOn Search Set SetTo Show Sing Sleep
+ Smell Sorry Squeeze Strong Swim Swing SwitchOff SwitchOn Take Taste Tell Think
+ ThrowAt ThrownAt Tie Touch Transfer Turn Unlock VagueGo Verify Version Wait
+ Wake WakeOther Wave WaveHands Wear Yes
+]]))
+lex:add_style('action', lexer.STYLE_VARIABLE)
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '!' * lexer.nonnewline^0))
+
+-- Numbers.
+local inform_hex = '$' * lexer.xdigit^1
+local inform_bin = '$$' * S('01')^1
+lex:add_rule('number', token(lexer.NUMBER, lexer.integer + inform_hex +
+ inform_bin))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('@~=+-*/%^#=<>;:,.{}[]()&|?')))
+
+return lex
diff --git a/lexlua/ini.lua b/lexlua/ini.lua
new file mode 100644
index 000000000..907788405
--- /dev/null
+++ b/lexlua/ini.lua
@@ -0,0 +1,43 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Ini LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('ini')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ true false on off yes no
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') *
+ (lexer.alnum + S('_.'))^0))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')))
+
+-- Labels.
+lex:add_rule('label', token(lexer.LABEL,
+ lexer.delimited_range('[]', true, true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, lexer.starts_line(S(';#')) *
+ lexer.nonnewline^0))
+
+-- Numbers.
+local dec = lexer.digit^1 * ('_' * lexer.digit^1)^0
+local oct_num = '0' * S('01234567_')^1
+local integer = S('+-')^-1 * (lexer.hex_num + oct_num + dec)
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, '='))
+
+return lex
diff --git a/lexlua/io_lang.lua b/lexlua/io_lang.lua
new file mode 100644
index 000000000..28772bec4
--- /dev/null
+++ b/lexlua/io_lang.lua
@@ -0,0 +1,51 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Io LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('io_lang')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ block method while foreach if else do super self clone proto setSlot hasSlot
+ type write print forward
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ Block Buffer CFunction Date Duration File Future LinkedList List Map Message
+ Nil Nop Number Object String WeakLink
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'")
+local dq_str = lexer.delimited_range('"')
+local tq_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str))
+
+-- Comments.
+local line_comment = (P('#') + '//') * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR,
+ S('`~@$%^&*-+/=\\<>?.,:;()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/java.lua b/lexlua/java.lua
new file mode 100644
index 000000000..a85bf154e
--- /dev/null
+++ b/lexlua/java.lua
@@ -0,0 +1,66 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Java LPeg lexer.
+-- Modified by Brian Schott.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('java')
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+lex:add_rule('whitespace', ws)
+
+-- Classes.
+lex:add_rule('classdef', token(lexer.KEYWORD, P('class')) * ws *
+ token(lexer.CLASS, lexer.word))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abstract assert break case catch class const continue default do else enum
+ extends final finally for goto if implements import instanceof interface
+ native new package private protected public return static strictfp super
+ switch synchronized this throw throws transient try while volatile
+ -- Literals.
+ true false null
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ boolean byte char double float int long short void
+ Boolean Byte Character Double Float Integer Long Short String
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('('))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ S('LlFfDd')^-1))
+
+-- Annotations.
+lex:add_rule('annotation', token('annotation', '@' * lexer.word))
+lex:add_style('annotation', lexer.STYLE_PREPROCESSOR)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/javascript.lua b/lexlua/javascript.lua
new file mode 100644
index 000000000..11acab636
--- /dev/null
+++ b/lexlua/javascript.lua
@@ -0,0 +1,50 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- JavaScript LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('javascript')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abstract boolean break byte case catch char class const continue debugger
+ default delete do double else enum export extends false final finally float
+ for function get goto if implements import in instanceof int interface let
+ long native new null of package private protected public return set short
+ static super switch synchronized this throw throws transient true try typeof
+ var void volatile while with yield
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Strings.
+local regex_str = #P('/') * lexer.last_char_includes('+-*%^!=&|?:;,([{<>') *
+ lexer.delimited_range('/', true) * S('igm')^0
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"') +
+ lexer.delimited_range('`')) +
+ token(lexer.REGEX, regex_str))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%^!=&|?:;,.()[]{}<>')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/json.lua b/lexlua/json.lua
new file mode 100644
index 000000000..d2b5cf35b
--- /dev/null
+++ b/lexlua/json.lua
@@ -0,0 +1,39 @@
+-- Copyright 2006-2018 Brian "Sir Alaran" Schott. See License.txt.
+-- JSON LPeg lexer.
+-- Based off of lexer code by Mitchell.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('json')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[true false null]]))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+local integer = S('+-')^-1 * lexer.digit^1 * S('Ll')^-1
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('[]{}:,')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/jsp.lua b/lexlua/jsp.lua
new file mode 100644
index 000000000..49f6625eb
--- /dev/null
+++ b/lexlua/jsp.lua
@@ -0,0 +1,20 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- JSP LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('jsp', {inherit = lexer.load('html')})
+
+-- Embedded Java.
+local java = lexer.load('java')
+local java_start_rule = token('jsp_tag', '<%' * P('=')^-1)
+local java_end_rule = token('jsp_tag', '%>')
+lex:embed(java, java_start_rule, java_end_rule, true)
+lex:add_style('jsp_tag', lexer.STYLE_EMBEDDED)
+
+-- Fold points.
+lex:add_fold_point('jsp_tag', '<%', '%>')
+
+return lex
diff --git a/lexlua/latex.lua b/lexlua/latex.lua
new file mode 100644
index 000000000..a1a0f6eb4
--- /dev/null
+++ b/lexlua/latex.lua
@@ -0,0 +1,58 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Latex LPeg lexer.
+-- Modified by Brian Schott.
+-- Modified by Robert Gieseke.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('latex')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+local line_comment = '%' * lexer.nonnewline^0
+local block_comment = '\\begin' * P(' ')^0 * '{comment}' *
+ (lexer.any - '\\end' * P(' ')^0 * '{comment}')^0 *
+ P('\\end' * P(' ')^0 * '{comment}')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Math environments.
+local math_word = word_match[[
+ align displaymath eqnarray equation gather math multline
+]]
+local math_begin_end = (P('begin') + P('end')) * P(' ')^0 *
+ '{' * math_word * P('*')^-1 * '}'
+lex:add_rule('math', token('math', '$' + '\\' * (S('[]()') + math_begin_end)))
+lex:add_style('math', lexer.STYLE_FUNCTION)
+
+-- LaTeX environments.
+lex:add_rule('environment', token('environment', '\\' *
+ (P('begin') + P('end')) *
+ P(' ')^0 * '{' * lexer.word *
+ P('*')^-1 * '}'))
+lex:add_style('environment', lexer.STYLE_KEYWORD)
+
+-- Sections.
+lex:add_rule('section', token('section', '\\' * word_match[[
+ part chapter section subsection subsubsection paragraph subparagraph
+]] * P('*')^-1))
+lex:add_style('section', lexer.STYLE_CLASS)
+
+-- Commands.
+lex:add_rule('command', token('command', '\\' *
+ (lexer.alpha^1 + S('#$&~_^%{}'))))
+lex:add_style('command', lexer.STYLE_KEYWORD)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('&#{}[]')))
+
+-- Fold points.
+lex:add_fold_point(lexer.COMMENT, '\\begin', '\\end')
+lex:add_fold_point(lexer.COMMENT, '%', lexer.fold_line_comments('%'))
+lex:add_fold_point('environment', '\\begin', '\\end')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+
+return lex
diff --git a/lexlua/ledger.lua b/lexlua/ledger.lua
new file mode 100644
index 000000000..a697a6d8b
--- /dev/null
+++ b/lexlua/ledger.lua
@@ -0,0 +1,48 @@
+-- Copyright 2015-2018 Charles Lehner. See License.txt.
+-- ledger journal LPeg lexer, see http://www.ledger-cli.org/
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('ledger', {lex_by_line = true})
+
+local delim = P('\t') + P(' ')
+
+-- Account.
+lex:add_rule('account', token(lexer.VARIABLE,
+ lexer.starts_line(S(' \t')^1 *
+ (lexer.print - delim)^1)))
+
+-- Amount.
+lex:add_rule('amount', token(lexer.NUMBER, delim * (1 - S(';\r\n'))^1))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, S(';#') * lexer.nonnewline^0))
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'")
+local dq_str = lexer.delimited_range('"')
+local label = lexer.delimited_range('[]', true, true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + label))
+
+-- Date.
+lex:add_rule('date', token(lexer.CONSTANT,
+ lexer.starts_line((lexer.digit + S('/-'))^1)))
+
+-- Automated transactions.
+lex:add_rule('auto_tx', token(lexer.PREPROCESSOR,
+ lexer.starts_line(S('=~') * lexer.nonnewline^0)))
+
+-- Directives.
+local directive_word = word_match[[
+ account alias assert bucket capture check comment commodity define end fixed
+ endfixed include payee apply tag test year
+]] + S('AYNDCIiOobh')
+lex:add_rule('directive', token(lexer.KEYWORD,
+ lexer.starts_line(S('!@')^-1 * directive_word)))
+
+return lex
diff --git a/lexlua/less.lua b/lexlua/less.lua
new file mode 100644
index 000000000..8544f9f78
--- /dev/null
+++ b/lexlua/less.lua
@@ -0,0 +1,21 @@
+-- Copyright 2006-2018 Robert Gieseke. See License.txt.
+-- Less CSS LPeg lexer.
+-- http://lesscss.org
+
+local lexer = require('lexer')
+local token = lexer.token
+local S = lpeg.S
+
+local lex = lexer.new('less', {inherit = lexer.load('css')})
+
+-- Line comments.
+lex:add_rule('line_comment', token(lexer.COMMENT, '//' * lexer.nonnewline^0))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, '@' *
+ (lexer.alnum + S('_-{}'))^1))
+
+-- Fold points.
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/lexer.lua b/lexlua/lexer.lua
new file mode 100644
index 000000000..6c063fcb5
--- /dev/null
+++ b/lexlua/lexer.lua
@@ -0,0 +1,1865 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+
+local M = {}
+
+--[=[ This comment is for LuaDoc.
+---
+-- Lexes Scintilla documents and source code with Lua and LPeg.
+--
+-- ## Writing Lua Lexers
+--
+-- Lexers highlight the syntax of source code. Scintilla (the editing component
+-- behind [Textadept][]) traditionally uses static, compiled C++ lexers which
+-- are notoriously difficult to create and/or extend. On the other hand, Lua
+-- makes it easy to to rapidly create new lexers, extend existing ones, and
+-- embed lexers within one another. Lua lexers tend to be more readable than C++
+-- lexers too.
+--
+-- Lexers are Parsing Expression Grammars, or PEGs, composed with the Lua
+-- [LPeg library][]. The following table comes from the LPeg documentation and
+-- summarizes all you need to know about constructing basic LPeg patterns. This
+-- module provides convenience functions for creating and working with other
+-- more advanced patterns and concepts.
+--
+-- Operator | Description
+-- ---------------------|------------
+-- `lpeg.P(string)` | Matches `string` literally.
+-- `lpeg.P(`_`n`_`)` | Matches exactly _`n`_ characters.
+-- `lpeg.S(string)` | Matches any character in set `string`.
+-- `lpeg.R("`_`xy`_`")` | Matches any character between range `x` and `y`.
+-- `patt^`_`n`_ | Matches at least _`n`_ repetitions of `patt`.
+-- `patt^-`_`n`_ | Matches at most _`n`_ repetitions of `patt`.
+-- `patt1 * patt2` | Matches `patt1` followed by `patt2`.
+-- `patt1 + patt2` | Matches `patt1` or `patt2` (ordered choice).
+-- `patt1 - patt2` | Matches `patt1` if `patt2` does not match.
+-- `-patt` | Equivalent to `("" - patt)`.
+-- `#patt` | Matches `patt` but consumes no input.
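+--
+-- As a brief illustrative sketch (not taken from the LPeg manual), these
+-- operators compose directly into larger patterns. For example, a pattern
+-- that matches a "--" line comment running to the end of the line could be
+-- written as:
+--
+--     local P, S = lpeg.P, lpeg.S
+--     local line_comment = P('--') * (P(1) - S('\r\n'))^0
+--
+-- Here `P('--')` matches the literal comment prefix, `P(1)` matches any
+-- single character, and subtracting `S('\r\n')` stops the repetition at the
+-- end of the line.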
+--
+-- The first part of this document deals with rapidly constructing a simple
+-- lexer. The next part deals with more advanced techniques, such as custom
+-- coloring and embedding lexers within one another. Following that is a
+-- discussion about code folding, or being able to tell Scintilla which code
+-- blocks are "foldable" (temporarily hideable from view). After that are
+-- instructions on how to use Lua lexers with the aforementioned Textadept
+-- editor. Finally there are comments on lexer performance and limitations.
+--
+-- [LPeg library]: http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html
+-- [Textadept]: http://foicica.com/textadept
+--
+-- ### Lexer Basics
+--
+-- The *lexers/* directory contains all lexers, including your new one. Before
+-- attempting to write one from scratch though, first determine if your
+-- programming language is similar to any of the 100+ languages supported. If
+-- so, you may be able to copy and modify that lexer, saving some time and
+-- effort. The filename of your lexer should be the name of your programming
+-- language in lower case followed by a *.lua* extension. For example, a new Lua
+-- lexer has the name *lua.lua*.
+--
+-- Note: Try to refrain from using one-character language names like "c", "d",
+-- or "r". For example, Lua lexers for those language names are named "ansi_c",
+-- "dmd", and "rstats", respectively.
+--
+-- #### New Lexer Template
+--
+-- There is a *lexers/template.txt* file that contains a simple template for a
+-- new lexer. Feel free to use it, replacing the '?'s with the name of your
+-- lexer. Consider this snippet from the template:
+--
+-- -- ? LPeg lexer.
+--
+-- local lexer = require('lexer')
+-- local token, word_match = lexer.token, lexer.word_match
+-- local P, R, S = lpeg.P, lpeg.R, lpeg.S
+--
+-- local lex = lexer.new('?')
+--
+-- -- Whitespace.
+-- local ws = token(lexer.WHITESPACE, lexer.space^1)
+-- lex:add_rule('whitespace', ws)
+--
+-- [...]
+--
+-- return lex
+--
+-- The first 3 lines of code simply define often used convenience variables. The
+-- fourth and last lines [define](#lexer.new) and return the lexer object
+-- Scintilla uses; they are very important and must be part of every lexer. The
+-- fifth line defines something called a "token", an essential building block of
+-- lexers. You will learn about tokens shortly. The sixth line defines a lexer
+-- grammar rule, which you will learn about later, as well as token styles. (Be
+-- aware that it is common practice to combine these two lines for short rules.)
+-- Note, however, the `local` prefix in front of variables, which is needed
+-- so as not to affect Lua's global environment. All in all, this is a minimal,
+-- working lexer that you can build on.
+--
+-- #### Tokens
+--
+-- Take a moment to think about your programming language's structure. What kind
+-- of key elements does it have? In the template shown earlier, one predefined
+-- element all languages have is whitespace. Your language probably also has
+-- elements like comments, strings, and keywords. Lexers refer to these elements
+-- as "tokens". Tokens are the fundamental "building blocks" of lexers. Lexers
+-- break down source code into tokens for coloring, which results in the syntax
+-- highlighting familiar to you. It is up to you how specific your lexer is when
+-- it comes to tokens. Perhaps only distinguishing between keywords and
+-- identifiers is necessary, or maybe recognizing constants and built-in
+-- functions, methods, or libraries is desirable. The Lua lexer, for example,
+-- defines 11 tokens: whitespace, keywords, built-in functions, constants,
+-- built-in libraries, identifiers, strings, comments, numbers, labels, and
+-- operators. Even though constants, built-in functions, and built-in libraries
+-- are subsets of identifiers, Lua programmers find it helpful for the lexer to
+-- distinguish between them all. It is perfectly acceptable to just recognize
+-- keywords and identifiers.
+--
+-- In a lexer, tokens consist of a token name and an LPeg pattern that matches a
+-- sequence of characters recognized as an instance of that token. Create tokens
+-- using the [`lexer.token()`]() function. Let us examine the "whitespace" token
+-- defined in the template shown earlier:
+--
+-- local ws = token(lexer.WHITESPACE, lexer.space^1)
+--
+-- At first glance, the first argument does not appear to be a string name and
+-- the second argument does not appear to be an LPeg pattern. Perhaps you
+-- expected something like:
+--
+-- local ws = token('whitespace', S('\t\v\f\n\r ')^1)
+--
+-- The `lexer` module actually provides a convenient list of common token names
+-- and common LPeg patterns for you to use. Token names include
+-- [`lexer.DEFAULT`](), [`lexer.WHITESPACE`](), [`lexer.COMMENT`](),
+-- [`lexer.STRING`](), [`lexer.NUMBER`](), [`lexer.KEYWORD`](),
+-- [`lexer.IDENTIFIER`](), [`lexer.OPERATOR`](), [`lexer.ERROR`](),
+-- [`lexer.PREPROCESSOR`](), [`lexer.CONSTANT`](), [`lexer.VARIABLE`](),
+-- [`lexer.FUNCTION`](), [`lexer.CLASS`](), [`lexer.TYPE`](), [`lexer.LABEL`](),
+-- [`lexer.REGEX`](), and [`lexer.EMBEDDED`](). Patterns include
+-- [`lexer.any`](), [`lexer.ascii`](), [`lexer.extend`](), [`lexer.alpha`](),
+-- [`lexer.digit`](), [`lexer.alnum`](), [`lexer.lower`](), [`lexer.upper`](),
+-- [`lexer.xdigit`](), [`lexer.cntrl`](), [`lexer.graph`](), [`lexer.print`](),
+-- [`lexer.punct`](), [`lexer.space`](), [`lexer.newline`](),
+-- [`lexer.nonnewline`](), [`lexer.nonnewline_esc`](), [`lexer.dec_num`](),
+-- [`lexer.hex_num`](), [`lexer.oct_num`](), [`lexer.integer`](),
+-- [`lexer.float`](), and [`lexer.word`](). You may use your own token names if
+-- none of the above fit your language, but an advantage to using predefined
+-- token names is that your lexer's tokens will inherit the universal syntax
+-- highlighting color theme used by your text editor.
+--
+-- ##### Example Tokens
+--
+-- So, how might you define other tokens like keywords, comments, and strings?
+-- Here are some examples.
+--
+-- **Keywords**
+--
+-- Instead of matching _n_ keywords with _n_ `P('keyword_`_`n`_`')` ordered
+-- choices, use another convenience function: [`lexer.word_match()`](). It is
+-- much easier and more efficient to write word matches like:
+--
+-- local keyword = token(lexer.KEYWORD, lexer.word_match[[
+-- keyword_1 keyword_2 ... keyword_n
+-- ]])
+--
+-- local case_insensitive_keyword = token(lexer.KEYWORD, lexer.word_match([[
+-- KEYWORD_1 keyword_2 ... KEYword_n
+-- ]], true))
+--
+-- local hyphened_keyword = token(lexer.KEYWORD, lexer.word_match[[
+-- keyword-1 keyword-2 ... keyword-n
+-- ]])
+--
+-- In order to more easily separate or categorize keyword sets, you can use Lua
+-- line comments within keyword strings. Such comments will be ignored. For
+-- example:
+--
+-- local keyword = token(lexer.KEYWORD, lexer.word_match[[
+-- -- Version 1 keywords.
+--       keyword_11 keyword_12 ... keyword_1n
+--       -- Version 2 keywords.
+--       keyword_21 keyword_22 ... keyword_2n
+--       ...
+--       -- Version N keywords.
+--       keyword_m1 keyword_m2 ... keyword_mn
+-- ]])
+--
+-- **Comments**
+--
+-- Line-style comments with a prefix character(s) are easy to express with LPeg:
+--
+-- local shell_comment = token(lexer.COMMENT, '#' * lexer.nonnewline^0)
+-- local c_line_comment = token(lexer.COMMENT,
+-- '//' * lexer.nonnewline_esc^0)
+--
+-- The comments above start with a '#' or "//" and go to the end of the line.
+-- The second comment recognizes the next line also as a comment if the current
+-- line ends with a '\' escape character.
+--
+-- C-style "block" comments with a start and end delimiter are also easy to
+-- express:
+--
+-- local c_comment = token(lexer.COMMENT,
+-- '/*' * (lexer.any - '*/')^0 * P('*/')^-1)
+--
+-- This comment starts with a "/\*" sequence and contains anything up to and
+-- including an ending "\*/" sequence. The ending "\*/" is optional so the lexer
+-- can recognize unfinished comments as comments and highlight them properly.
+--
+-- **Strings**
+--
+-- It is tempting to think that a string is not much different from the block
+-- comment shown above in that both have start and end delimiters:
+--
+-- local dq_str = '"' * (lexer.any - '"')^0 * P('"')^-1
+-- local sq_str = "'" * (lexer.any - "'")^0 * P("'")^-1
+-- local simple_string = token(lexer.STRING, dq_str + sq_str)
+--
+-- However, most programming languages allow escape sequences in strings such
+-- that a sequence like "\\&quot;" in a double-quoted string indicates that the
+-- '&quot;' is not the end of the string. The above token incorrectly matches
+-- such a string. Instead, use the [`lexer.delimited_range()`]() convenience
+-- function.
+--
+-- local dq_str = lexer.delimited_range('"')
+-- local sq_str = lexer.delimited_range("'")
+-- local string = token(lexer.STRING, dq_str + sq_str)
+--
+-- In this case, the lexer treats '\' as an escape character in a string
+-- sequence.
+--
+-- **Numbers**
+--
+-- Most programming languages have the same format for integer and float tokens,
+-- so it might be as simple as using a couple of predefined LPeg patterns:
+--
+-- local number = token(lexer.NUMBER, lexer.float + lexer.integer)
+--
+-- However, some languages allow postfix characters on integers.
+--
+-- local integer = P('-')^-1 * (lexer.dec_num * S('lL')^-1)
+-- local number = token(lexer.NUMBER, lexer.float + lexer.hex_num + integer)
+--
+-- Your language may need other tweaks, but it is up to you how fine-grained you
+-- want your highlighting to be. After all, you are not writing a compiler or
+-- interpreter!
+--
+-- #### Rules
+--
+-- Programming languages have grammars, which specify valid token structure. For
+-- example, comments usually cannot appear within a string. Grammars consist of
+-- rules, which are simply combinations of tokens. Recall from the lexer
+-- template the [`lexer.add_rule()`]() call, which adds a rule to the lexer's
+-- grammar:
+--
+-- lex:add_rule('whitespace', ws)
+--
+-- Each rule has an associated name, but rule names are completely arbitrary and
+-- serve only to identify and distinguish between different rules. Rule order is
+-- important: if text does not match the first rule added to the grammar, the
+-- lexer tries to match the second rule added, and so on. Right now this lexer
+-- simply matches whitespace tokens under a rule named "whitespace".
+--
+-- To illustrate the importance of rule order, here is an example of a
+-- simplified Lua lexer:
+--
+-- lex:add_rule('whitespace', token(lexer.WHITESPACE, ...))
+-- lex:add_rule('keyword', token(lexer.KEYWORD, ...))
+-- lex:add_rule('identifier', token(lexer.IDENTIFIER, ...))
+-- lex:add_rule('string', token(lexer.STRING, ...))
+-- lex:add_rule('comment', token(lexer.COMMENT, ...))
+-- lex:add_rule('number', token(lexer.NUMBER, ...))
+-- lex:add_rule('label', token(lexer.LABEL, ...))
+-- lex:add_rule('operator', token(lexer.OPERATOR, ...))
+--
+-- Note how identifiers come after keywords. In Lua, as with most programming
+-- languages, the characters allowed in keywords and identifiers are in the same
+-- set (alphanumerics plus underscores). If the lexer added the "identifier"
+-- rule before the "keyword" rule, all keywords would match identifiers and thus
+-- incorrectly highlight as identifiers instead of keywords. The same idea
+-- applies to function, constant, etc. tokens that you may want to distinguish
+-- between: their rules should come before identifiers.
+--
+-- So what about text that does not match any rules? For example in Lua, the '!'
+-- character is meaningless outside a string or comment. Normally the lexer
+-- skips over such text. If instead you want to highlight these "syntax errors",
+-- add an additional end rule:
+--
+-- lex:add_rule('whitespace', ws)
+-- ...
+-- lex:add_rule('error', token(lexer.ERROR, lexer.any))
+--
+-- This identifies and highlights any character not matched by an existing
+-- rule as a `lexer.ERROR` token.
+--
+-- Even though the rules defined in the examples above contain a single token,
+-- rules may consist of multiple tokens. For example, a rule for an HTML tag
+-- could consist of a tag token followed by an arbitrary number of attribute
+-- tokens, allowing the lexer to highlight all tokens separately. That rule
+-- might look something like this:
+--
+-- lex:add_rule('tag', tag_start * (ws * attributes)^0 * tag_end^-1)
+--
+-- Note however that lexers with complex rules like these are more prone to lose
+-- track of their state, especially if they span multiple lines.
+--
+-- #### Summary
+--
+-- Lexers primarily consist of tokens and grammar rules. At your disposal are a
+-- number of convenience patterns and functions for rapidly creating a lexer. If
+-- you choose to use predefined token names for your tokens, you do not have to
+-- define how the lexer highlights them. The tokens will inherit the default
+-- syntax highlighting color theme your editor uses.
+--
+-- ### Advanced Techniques
+--
+-- #### Styles and Styling
+--
+-- The most basic form of syntax highlighting is assigning different colors to
+-- different tokens. Instead of highlighting with just colors, Scintilla allows
+-- for more rich highlighting, or "styling", with different fonts, font sizes,
+-- font attributes, and foreground and background colors, just to name a few.
+-- The unit of this rich highlighting is called a "style". Styles are simply
+-- strings of comma-separated property settings. By default, lexers associate
+-- predefined token names like `lexer.WHITESPACE`, `lexer.COMMENT`,
+-- `lexer.STRING`, etc. with particular styles as part of a universal color
+-- theme. These predefined styles include [`lexer.STYLE_CLASS`](),
+-- [`lexer.STYLE_COMMENT`](), [`lexer.STYLE_CONSTANT`](),
+-- [`lexer.STYLE_ERROR`](), [`lexer.STYLE_EMBEDDED`](),
+-- [`lexer.STYLE_FUNCTION`](), [`lexer.STYLE_IDENTIFIER`](),
+-- [`lexer.STYLE_KEYWORD`](), [`lexer.STYLE_LABEL`](), [`lexer.STYLE_NUMBER`](),
+-- [`lexer.STYLE_OPERATOR`](), [`lexer.STYLE_PREPROCESSOR`](),
+-- [`lexer.STYLE_REGEX`](), [`lexer.STYLE_STRING`](), [`lexer.STYLE_TYPE`](),
+-- [`lexer.STYLE_VARIABLE`](), and [`lexer.STYLE_WHITESPACE`](). Like with
+-- predefined token names and LPeg patterns, you may define your own styles. At
+-- their core, styles are just strings, so you may create new ones and/or modify
+-- existing ones. Each style consists of the following comma-separated settings:
+--
+-- Setting | Description
+-- ---------------|------------
+-- font:_name_ | The name of the font the style uses.
+-- size:_int_ | The size of the font the style uses.
+-- [not]bold | Whether or not the font face is bold.
+-- weight:_int_ | The weight or boldness of a font, between 1 and 999.
+-- [not]italics | Whether or not the font face is italic.
+-- [not]underlined| Whether or not the font face is underlined.
+-- fore:_color_ | The foreground color of the font face.
+-- back:_color_ | The background color of the font face.
+-- [not]eolfilled | Whether or not the background color extends to the end of the line.
+-- case:_char_ | The case of the font ('u': upper, 'l': lower, 'm': normal).
+-- [not]visible | Whether or not the text is visible.
+-- [not]changeable| Whether the text is changeable or read-only.
+--
+-- Specify font colors in either "#RRGGBB" format, "0xBBGGRR" format, or the
+-- decimal equivalent of the latter. As with token names, LPeg patterns, and
+-- styles, there is a set of predefined color names, but they vary depending on
+-- the current color theme in use. Therefore, it is generally not a good idea to
+-- manually define colors within styles in your lexer since they might not fit
+-- into a user's chosen color theme. Try to refrain from even using predefined
+-- colors in a style because that color may be theme-specific. Instead, the best
+-- practice is to either use predefined styles or derive new color-agnostic
+-- styles from predefined ones. For example, Lua "longstring" tokens use the
+-- existing `lexer.STYLE_STRING` style instead of defining a new one.
+--
+-- ##### Example Styles
+--
+-- Defining styles is pretty straightforward. An empty style that inherits the
+-- default theme settings is simply an empty string:
+--
+-- local style_nothing = ''
+--
+-- A similar style but with a bold font face looks like this:
+--
+-- local style_bold = 'bold'
+--
+-- If you want the same style, but also with an italic font face, define the new
+-- style in terms of the old one:
+--
+-- local style_bold_italic = style_bold..',italics'
+--
+-- This allows you to derive new styles from predefined ones without having to
+-- rewrite them. This operation leaves the old style unchanged. Thus if you
+-- had a "static variable" token whose style you wanted to base off of
+-- `lexer.STYLE_VARIABLE`, it would probably look like:
+--
+-- local style_static_var = lexer.STYLE_VARIABLE..',italics'
+--
+-- The color theme files in the *lexers/themes/* folder give more examples of
+-- style definitions.
+--
+-- #### Token Styles
+--
+-- Lexers use the [`lexer.add_style()`]() function to assign styles to
+-- particular tokens. Recall the token definition and rule from the lexer template:
+--
+-- local ws = token(lexer.WHITESPACE, lexer.space^1)
+-- lex:add_rule('whitespace', ws)
+--
+-- Why is a style not assigned to the `lexer.WHITESPACE` token? As mentioned
+-- earlier, lexers automatically associate tokens that use predefined token
+-- names with a particular style. Only tokens with custom token names need
+-- manual style associations. As an example, consider a custom whitespace token:
+--
+-- local ws = token('custom_whitespace', lexer.space^1)
+--
+-- Assigning a style to this token looks like:
+--
+-- lex:add_style('custom_whitespace', lexer.STYLE_WHITESPACE)
+--
+-- Do not confuse token names with rule names. They are completely different
+-- entities. In the example above, the lexer associates the "custom_whitespace"
+-- token with the existing style for `lexer.WHITESPACE` tokens. If instead you
+-- prefer to color the background of whitespace a shade of grey, it might look
+-- like:
+--
+-- local custom_style = lexer.STYLE_WHITESPACE..',back:$(color.grey)'
+-- lex:add_style('custom_whitespace', custom_style)
+--
+-- Notice that the lexer performs Scintilla-style "$()" property expansion. You
+-- may also use "%()". Remember to refrain from assigning specific colors in
+-- styles, but in this case, all user color themes probably define the
+-- "color.grey" property.
+--
+-- #### Line Lexers
+--
+-- By default, lexers match the arbitrary chunks of text passed to them by
+-- Scintilla. These chunks may be a full document, only the visible part of a
+-- document, or even just portions of lines. Some lexers need to match whole
+-- lines. For example, a lexer for the output of a file "diff" needs to know if
+-- the line started with a '+' or '-' and then style the entire line
+-- accordingly. To indicate that your lexer matches by line, create the lexer
+-- with an extra parameter:
+--
+-- local lex = lexer.new('?', {lex_by_line = true})
+--
+-- Now the input text for the lexer is a single line at a time. Keep in mind
+-- that line lexers do not have the ability to look ahead at subsequent lines.
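+--
+-- For example, a hypothetical whole-line rule that styles lines beginning with
+-- '+' (the rule and token names are arbitrary choices, and the template's `P`
+-- convenience local is assumed) might look like:
+--
+--     lex:add_rule('addition', token(lexer.STRING,
+--                                    lexer.starts_line(P('+')) *
+--                                    lexer.nonnewline^0))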
+--
+-- #### Embedded Lexers
+--
+-- Lexers embed within one another very easily, requiring minimal effort. In the
+-- following sections, the lexer being embedded is called the "child" lexer and
+-- the lexer a child is being embedded in is called the "parent". For example,
+-- consider an HTML lexer and a CSS lexer. Either lexer stands alone for styling
+-- their respective HTML and CSS files. However, CSS can be embedded inside
+-- HTML. In this specific case, the CSS lexer is the "child" lexer with the HTML
+-- lexer being the "parent". Now consider an HTML lexer and a PHP lexer. This
+-- sounds a lot like the case with CSS, but there is a subtle difference: PHP
+-- _embeds itself into_ HTML while CSS is _embedded in_ HTML. This fundamental
+-- difference results in two types of embedded lexers: a parent lexer that
+-- embeds other child lexers in it (like HTML embedding CSS), and a child lexer
+-- that embeds itself into a parent lexer (like PHP embedding itself in HTML).
+--
+-- ##### Parent Lexer
+--
+-- Before embedding a child lexer into a parent lexer, the parent lexer needs to
+-- load the child lexer. This is done with the [`lexer.load()`]() function. For
+-- example, loading the CSS lexer within the HTML lexer looks like:
+--
+-- local css = lexer.load('css')
+--
+-- The next part of the embedding process is telling the parent lexer when to
+-- switch over to the child lexer and when to switch back. The lexer refers to
+-- these indications as the "start rule" and "end rule", respectively, and are
+-- just LPeg patterns. Continuing with the HTML/CSS example, the transition from
+-- HTML to CSS is when the lexer encounters a "style" tag with a "type"
+-- attribute whose value is "text/css":
+--
+-- local css_tag = P('<style') * P(function(input, index)
+-- if input:find('^[^>]+type="text/css"', index) then
+-- return index
+-- end
+-- end)
+--
+-- This pattern looks for the beginning of a "style" tag and searches its
+-- attribute list for the text "`type="text/css"`". (In this simplified example,
+-- the Lua pattern does not consider whitespace around the '=', nor does it
+-- consider that using single quotes is valid.) If there is a match, the
+-- functional pattern returns a value instead of `nil`. In this case, the value
+-- returned does not matter because we ultimately want to style the "style" tag
+-- as an HTML tag, so the actual start rule looks like this:
+--
+-- local css_start_rule = #css_tag * tag
+--
+-- Now that the parent knows when to switch to the child, it needs to know when
+-- to switch back. In the case of HTML/CSS, the switch back occurs when the
+-- lexer encounters an ending "style" tag, though the lexer should still style
+-- the tag as an HTML tag:
+--
+-- local css_end_rule = #P('</style>') * tag
+--
+-- Once the parent loads the child lexer and defines the child's start and end
+-- rules, it embeds the child with the [`lexer.embed()`]() function:
+--
+-- lex:embed(css, css_start_rule, css_end_rule)
+--
+-- ##### Child Lexer
+--
+-- The process for instructing a child lexer to embed itself into a parent is
+-- very similar to embedding a child into a parent: first, load the parent lexer
+-- into the child lexer with the [`lexer.load()`]() function and then create
+-- start and end rules for the child lexer. However, in this case, call
+-- [`lexer.embed()`]() with switched arguments. For example, in the PHP lexer:
+--
+-- local html = lexer.load('html')
+-- local php_start_rule = token('php_tag', '<?php ')
+-- local php_end_rule = token('php_tag', '?>')
+-- lex:add_style('php_tag', lexer.STYLE_EMBEDDED)
+-- html:embed(lex, php_start_rule, php_end_rule)
+--
+-- #### Lexers with Complex State
+--
+-- A vast majority of lexers are not stateful and can operate on any chunk of
+-- text in a document. However, there may be rare cases where a lexer does need
+-- to keep track of some sort of persistent state. Rather than using `lpeg.P`
+-- function patterns that set state variables, it is recommended to make use of
+-- Scintilla's built-in, per-line state integers via [`lexer.line_state`](). It
+-- was designed to accommodate up to 32 bit flags for tracking state.
+-- [`lexer.line_from_position()`]() will return the line for any position given
+-- to an `lpeg.P` function pattern. (Any positions derived from that position
+-- argument will also work.)
+--
+-- Writing stateful lexers is beyond the scope of this document.
+--
+-- ### Code Folding
+--
+-- When reading source code, it is occasionally helpful to temporarily hide
+-- blocks of code like functions, classes, comments, etc. This is the concept of
+-- "folding". In many Scintilla-based editors, such as Textadept, little
+-- indicators in the editor margins appear next to code that can be folded at
+-- places called "fold points". When the user clicks an indicator, the editor
+-- hides the code associated with the indicator until the user clicks the
+-- indicator again. The lexer specifies these fold points and what code exactly
+-- to fold.
+--
+-- The fold points for most languages occur on keywords or character sequences.
+-- Examples of fold keywords are "if" and "end" in Lua and examples of fold
+-- character sequences are '{', '}', "/\*", and "\*/" in C for code block and
+-- comment delimiters, respectively. However, these fold points cannot occur
+-- just anywhere. For example, lexers should not recognize fold keywords that
+-- appear within strings or comments. The [`lexer.add_fold_point()`]() function
+-- allows you to conveniently define fold points with such granularity. For
+-- example, consider C:
+--
+-- lex:add_fold_point(lexer.OPERATOR, '{', '}')
+-- lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+--
+-- The first call states that any '{' or '}' that the lexer recognizes as a
+-- `lexer.OPERATOR` token is a fold point. Likewise, the second call states
+-- that any "/\*" or "\*/" that the lexer recognizes as part of a
+-- `lexer.COMMENT` token is a fold point. The lexer does not consider any
+-- occurrences of these characters outside their defined tokens (such as in a
+-- string) as fold points. How do you specify fold keywords? Here is an example
+-- for Lua:
+--
+-- lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
+-- lex:add_fold_point(lexer.KEYWORD, 'do', 'end')
+-- lex:add_fold_point(lexer.KEYWORD, 'function', 'end')
+-- lex:add_fold_point(lexer.KEYWORD, 'repeat', 'until')
+--
+-- If your lexer has case-insensitive keywords as fold points, simply add a
+-- `case_insensitive_fold_points = true` option to [`lexer.new()`](), and
+-- specify keywords in lower case.
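+--
+-- For example:
+--
+--     local lex = lexer.new('?', {case_insensitive_fold_points = true})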
+--
+-- If your lexer needs to do some additional processing in order to determine if
+-- a token is a fold point, pass a function that returns an integer to
+-- `lex:add_fold_point()`. Returning `1` indicates the token is a beginning fold
+-- point and returning `-1` indicates the token is an ending fold point.
+-- Returning `0` indicates the token is not a fold point. For example:
+--
+-- local function fold_strange_token(text, pos, line, s, symbol)
+-- if ... then
+-- return 1 -- beginning fold point
+-- elseif ... then
+-- return -1 -- ending fold point
+-- end
+-- return 0
+-- end
+--
+-- lex:add_fold_point('strange_token', '|', fold_strange_token)
+--
+-- Any time the lexer encounters a '|' that is a "strange_token", it calls the
+-- `fold_strange_token` function to determine if '|' is a fold point. The lexer
+-- calls these functions with the following arguments: the text to identify fold
+-- points in, the beginning position of the current line in the text to fold,
+-- the current line's text, the position in the current line the fold point text
+-- starts at, and the fold point text itself.
+--
+-- #### Fold by Indentation
+--
+-- Some languages have significant whitespace and/or no delimiters that indicate
+-- fold points. If your lexer falls into this category and you would like to
+-- mark fold points based on changes in indentation, create the lexer with a
+-- `fold_by_indentation = true` option:
+--
+-- local lex = lexer.new('?', {fold_by_indentation = true})
+--
+-- ### Using Lexers
+--
+-- #### Textadept
+--
+-- Put your lexer in your *~/.textadept/lexers/* directory so you do not
+-- overwrite it when upgrading Textadept. Also, lexers in this directory
+-- override default lexers. Thus, Textadept loads a user *lua* lexer instead of
+-- the default *lua* lexer. This is convenient for tweaking a default lexer to
+-- your liking. Then add a [file type][] for your lexer if necessary.
+--
+-- [file type]: textadept.file_types.html
+--
+-- ### Migrating Legacy Lexers
+--
+-- Legacy lexers are of the form:
+--
+-- local l = require('lexer')
+-- local token, word_match = l.token, l.word_match
+-- local P, R, S = lpeg.P, lpeg.R, lpeg.S
+--
+-- local M = {_NAME = '?'}
+--
+-- [... token and pattern definitions ...]
+--
+-- M._rules = {
+-- {'rule', pattern},
+-- [...]
+-- }
+--
+-- M._tokenstyles = {
+--       ['token'] = 'style',
+-- [...]
+-- }
+--
+-- M._foldsymbols = {
+-- _patterns = {...},
+-- ['token'] = {['start'] = 1, ['end'] = -1},
+-- [...]
+-- }
+--
+-- return M
+--
+-- While such legacy lexers will be handled just fine without any changes, it is
+-- recommended that you migrate yours. The migration process is fairly
+-- straightforward:
+--
+-- 1. Replace all instances of `l` with `lexer`, as it's better practice and
+-- results in less confusion.
+-- 2. Replace `local M = {_NAME = '?'}` with `local lex = lexer.new('?')`, where
+-- `?` is the name of your legacy lexer. At the end of the lexer, change
+-- `return M` to `return lex`.
+-- 3. Instead of defining rules towards the end of your lexer, define your rules
+-- as you define your tokens and patterns using
+-- [`lex:add_rule()`](#lexer.add_rule).
+-- 4. Similarly, any custom token names should have their styles immediately
+-- defined using [`lex:add_style()`](#lexer.add_style).
+-- 5. Convert any table arguments passed to [`lexer.word_match()`]() to a
+-- space-separated string of words.
+-- 6. Replace any calls to `lexer.embed(M, child, ...)` and
+-- `lexer.embed(parent, M, ...)` with
+-- [`lex:embed`](#lexer.embed)`(child, ...)` and `parent:embed(lex, ...)`,
+-- respectively.
+-- 7. Define fold points with simple calls to
+-- [`lex:add_fold_point()`](#lexer.add_fold_point). No need to mess with Lua
+-- patterns anymore.
+-- 8. Any legacy lexer options such as `M._FOLDBYINDENTATION`, `M._LEXBYLINE`,
+-- `M._lexer`, etc. should be added as table options to [`lexer.new()`]().
+-- 9. Any external lexer rule fetching and/or modifications via `lexer._RULES`
+-- should be changed to use [`lexer.get_rule()`]() and
+-- [`lexer.modify_rule()`]().
+--
+-- As an example, consider the following sample legacy lexer:
+--
+-- local l = require('lexer')
+-- local token, word_match = l.token, l.word_match
+-- local P, R, S = lpeg.P, lpeg.R, lpeg.S
+--
+-- local M = {_NAME = 'legacy'}
+--
+-- local ws = token(l.WHITESPACE, l.space^1)
+-- local comment = token(l.COMMENT, '#' * l.nonnewline^0)
+-- local string = token(l.STRING, l.delimited_range('"'))
+-- local number = token(l.NUMBER, l.float + l.integer)
+-- local keyword = token(l.KEYWORD, word_match{'foo', 'bar', 'baz'})
+-- local custom = token('custom', P('quux'))
+-- local identifier = token(l.IDENTIFIER, l.word)
+-- local operator = token(l.OPERATOR, S('+-*/%^=<>,.()[]{}'))
+--
+-- M._rules = {
+-- {'whitespace', ws},
+-- {'keyword', keyword},
+-- {'custom', custom},
+-- {'identifier', identifier},
+-- {'string', string},
+-- {'comment', comment},
+-- {'number', number},
+-- {'operator', operator}
+-- }
+--
+-- M._tokenstyles = {
+--       ['custom'] = l.STYLE_KEYWORD..',bold'
+-- }
+--
+-- M._foldsymbols = {
+-- _patterns = {'[{}]'},
+-- [l.OPERATOR] = {['{'] = 1, ['}'] = -1}
+-- }
+--
+-- return M
+--
+-- Following the migration steps would yield:
+--
+-- local lexer = require('lexer')
+-- local token, word_match = lexer.token, lexer.word_match
+-- local P, R, S = lpeg.P, lpeg.R, lpeg.S
+--
+-- local lex = lexer.new('legacy')
+--
+-- lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+-- lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[foo bar baz]]))
+-- lex:add_rule('custom', token('custom', P('quux')))
+-- lex:add_style('custom', lexer.STYLE_KEYWORD..',bold')
+-- lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+-- lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"')))
+-- lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+-- lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+-- lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/%^=<>,.()[]{}')))
+--
+-- lex:add_fold_point(lexer.OPERATOR, '{', '}')
+--
+-- return lex
+--
+-- ### Considerations
+--
+-- #### Performance
+--
+-- There might be some slight overhead when initializing a lexer, but loading a
+-- file from disk into Scintilla is usually more expensive. On modern computer
+-- systems, I see no difference in speed between Lua lexers and Scintilla's C++
+-- ones. Optimize lexers for speed by re-arranging `lexer.add_rule()` calls so
+-- that the most common rules match first. Do keep in mind that order matters
+-- for similar rules.
+--
+-- In some cases, folding may be far more expensive than lexing, particularly
+-- in lexers with a lot of potential fold points. If your lexer is exhibiting
+-- signs of slowness, try disabling folding in your text editor first. If that
+-- speeds things up, you can try reducing the number of fold points you added,
+-- overriding `lexer.fold()` with your own implementation, or simply eliminating
+-- folding support from your lexer.
+--
+-- #### Limitations
+--
+-- Embedded preprocessor languages like PHP cannot completely embed in their
+-- parent languages in that the parent's tokens do not support start and end
+-- rules. This mostly goes unnoticed, but code like
+--
+-- <div id="<?php echo $id; ?>">
+--
+-- will not style correctly.
+--
+-- #### Troubleshooting
+--
+-- Errors in lexers can be tricky to debug. Lexers print Lua errors to
+-- `io.stderr` and `_G.print()` statements to `io.stdout`. Running your editor
+-- from a terminal is the easiest way to see errors as they occur.
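+--
+-- As a hypothetical debugging aid (the `debug_patt` and `troublesome_patt`
+-- names below are illustrative only), you can temporarily prepend a function
+-- pattern that prints where a troublesome pattern is about to be tried:
+--
+--     local debug_patt = lpeg.P(function(input, index)
+--       print('trying pattern at index', index)
+--       return index -- succeed without consuming any input
+--     end) * troublesome_patt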
+--
+-- #### Risks
+--
+-- Poorly written lexers have the ability to crash Scintilla (and thus its
+-- containing application), so unsaved data might be lost. However, I have only
+-- observed these crashes in early lexer development, when syntax errors or
+-- pattern errors are present. Once the lexer actually starts styling text
+-- (either correctly or incorrectly, it does not matter), I have not observed
+-- any crashes.
+--
+-- #### Acknowledgements
+--
+-- Thanks to Peter Odding for his [lexer post][] on the Lua mailing list
+-- that inspired me, and thanks to Roberto Ierusalimschy for LPeg.
+--
+-- [lexer post]: http://lua-users.org/lists/lua-l/2007-04/msg00116.html
+-- @field path (string)
+-- The path used to search for a lexer to load.
+-- Identical in format to Lua's `package.path` string.
+-- The default value is `package.path`.
+-- @field DEFAULT (string)
+-- The token name for default tokens.
+-- @field WHITESPACE (string)
+-- The token name for whitespace tokens.
+-- @field COMMENT (string)
+-- The token name for comment tokens.
+-- @field STRING (string)
+-- The token name for string tokens.
+-- @field NUMBER (string)
+-- The token name for number tokens.
+-- @field KEYWORD (string)
+-- The token name for keyword tokens.
+-- @field IDENTIFIER (string)
+-- The token name for identifier tokens.
+-- @field OPERATOR (string)
+-- The token name for operator tokens.
+-- @field ERROR (string)
+-- The token name for error tokens.
+-- @field PREPROCESSOR (string)
+-- The token name for preprocessor tokens.
+-- @field CONSTANT (string)
+-- The token name for constant tokens.
+-- @field VARIABLE (string)
+-- The token name for variable tokens.
+-- @field FUNCTION (string)
+-- The token name for function tokens.
+-- @field CLASS (string)
+-- The token name for class tokens.
+-- @field TYPE (string)
+-- The token name for type tokens.
+-- @field LABEL (string)
+-- The token name for label tokens.
+-- @field REGEX (string)
+-- The token name for regex tokens.
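+-- @field EMBEDDED (string)
+--   The token name for embedded tokens.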
+-- @field STYLE_CLASS (string)
+-- The style typically used for class definitions.
+-- @field STYLE_COMMENT (string)
+-- The style typically used for code comments.
+-- @field STYLE_CONSTANT (string)
+-- The style typically used for constants.
+-- @field STYLE_ERROR (string)
+-- The style typically used for erroneous syntax.
+-- @field STYLE_FUNCTION (string)
+-- The style typically used for function definitions.
+-- @field STYLE_KEYWORD (string)
+-- The style typically used for language keywords.
+-- @field STYLE_LABEL (string)
+-- The style typically used for labels.
+-- @field STYLE_NUMBER (string)
+-- The style typically used for numbers.
+-- @field STYLE_OPERATOR (string)
+-- The style typically used for operators.
+-- @field STYLE_REGEX (string)
+-- The style typically used for regular expression strings.
+-- @field STYLE_STRING (string)
+-- The style typically used for strings.
+-- @field STYLE_PREPROCESSOR (string)
+-- The style typically used for preprocessor statements.
+-- @field STYLE_TYPE (string)
+-- The style typically used for static types.
+-- @field STYLE_VARIABLE (string)
+-- The style typically used for variables.
+-- @field STYLE_WHITESPACE (string)
+-- The style typically used for whitespace.
+-- @field STYLE_EMBEDDED (string)
+-- The style typically used for embedded code.
+-- @field STYLE_IDENTIFIER (string)
+-- The style typically used for identifier words.
+-- @field STYLE_DEFAULT (string)
+-- The style all styles are based off of.
+-- @field STYLE_LINENUMBER (string)
+-- The style used for all margins except fold margins.
+-- @field STYLE_BRACELIGHT (string)
+-- The style used for highlighted brace characters.
+-- @field STYLE_BRACEBAD (string)
+-- The style used for unmatched brace characters.
+-- @field STYLE_CONTROLCHAR (string)
+-- The style used for control characters.
+-- Color attributes are ignored.
+-- @field STYLE_INDENTGUIDE (string)
+-- The style used for indentation guides.
+-- @field STYLE_CALLTIP (string)
+-- The style used by call tips if [`buffer.call_tip_use_style`]() is set.
+-- Only the font name, size, and color attributes are used.
+-- @field STYLE_FOLDDISPLAYTEXT (string)
+-- The style used for fold display text.
+-- @field any (pattern)
+-- A pattern that matches any single character.
+-- @field ascii (pattern)
+-- A pattern that matches any ASCII character (codes 0 to 127).
+-- @field extend (pattern)
+-- A pattern that matches any ASCII extended character (codes 0 to 255).
+-- @field alpha (pattern)
+-- A pattern that matches any alphabetic character ('A'-'Z', 'a'-'z').
+-- @field digit (pattern)
+-- A pattern that matches any digit ('0'-'9').
+-- @field alnum (pattern)
+-- A pattern that matches any alphanumeric character ('A'-'Z', 'a'-'z',
+-- '0'-'9').
+-- @field lower (pattern)
+-- A pattern that matches any lower case character ('a'-'z').
+-- @field upper (pattern)
+-- A pattern that matches any upper case character ('A'-'Z').
+-- @field xdigit (pattern)
+-- A pattern that matches any hexadecimal digit ('0'-'9', 'A'-'F', 'a'-'f').
+-- @field cntrl (pattern)
+-- A pattern that matches any control character (ASCII codes 0 to 31).
+-- @field graph (pattern)
+-- A pattern that matches any graphical character ('!' to '~').
+-- @field print (pattern)
+-- A pattern that matches any printable character (' ' to '~').
+-- @field punct (pattern)
+-- A pattern that matches any punctuation character ('!' to '/', ':' to '@',
+--   '[' to '`', '{' to '~').
+-- @field space (pattern)
+-- A pattern that matches any whitespace character ('\t', '\v', '\f', '\n',
+-- '\r', space).
+-- @field newline (pattern)
+-- A pattern that matches any set of end of line characters.
+-- @field nonnewline (pattern)
+-- A pattern that matches any single, non-newline character.
+-- @field nonnewline_esc (pattern)
+-- A pattern that matches any single, non-newline character or any set of end
+-- of line characters escaped with '\'.
+-- @field dec_num (pattern)
+-- A pattern that matches a decimal number.
+-- @field hex_num (pattern)
+-- A pattern that matches a hexadecimal number.
+-- @field oct_num (pattern)
+-- A pattern that matches an octal number.
+-- @field integer (pattern)
+-- A pattern that matches either a decimal, hexadecimal, or octal number.
+-- @field float (pattern)
+-- A pattern that matches a floating point number.
+-- @field word (pattern)
+-- A pattern that matches a typical word. Words begin with a letter or
+-- underscore and consist of alphanumeric and underscore characters.
+-- @field FOLD_BASE (number)
+-- The initial (root) fold level.
+-- @field FOLD_BLANK (number)
+-- Flag indicating that the line is blank.
+-- @field FOLD_HEADER (number)
+--   Flag indicating the line is a fold point.
+-- @field fold_level (table, Read-only)
+-- Table of fold level bit-masks for line numbers starting from zero.
+-- Fold level masks are composed of an integer level combined with any of the
+-- following bits:
+--
+-- * `lexer.FOLD_BASE`
+-- The initial fold level.
+-- * `lexer.FOLD_BLANK`
+-- The line is blank.
+-- * `lexer.FOLD_HEADER`
+-- The line is a header, or fold point.
+-- @field indent_amount (table, Read-only)
+-- Table of indentation amounts in character columns, for line numbers
+-- starting from zero.
+-- @field line_state (table)
+-- Table of integer line states for line numbers starting from zero.
+-- Line states can be used by lexers for keeping track of persistent states.
+-- @field property (table)
+-- Map of key-value string pairs.
+-- @field property_expanded (table, Read-only)
+-- Map of key-value string pairs with `$()` and `%()` variable replacement
+-- performed in values.
+-- @field property_int (table, Read-only)
+-- Map of key-value pairs with values interpreted as numbers, or `0` if not
+-- found.
+-- @field style_at (table, Read-only)
+-- Table of style names at positions in the buffer starting from 1.
+module('lexer')]=]
+
+local lpeg = require('lpeg')
+local lpeg_P, lpeg_R, lpeg_S, lpeg_V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+local lpeg_Ct, lpeg_Cc, lpeg_Cp = lpeg.Ct, lpeg.Cc, lpeg.Cp
+local lpeg_Cmt, lpeg_C = lpeg.Cmt, lpeg.C
+local lpeg_match = lpeg.match
+
+M.path = package.path
+
+if not package.searchpath then
+ -- Searches for the given *name* in the given *path*.
+ -- This is an implementation of Lua 5.2's `package.searchpath()` function for
+ -- Lua 5.1.
+ function package.searchpath(name, path)
+ local tried = {}
+ for part in path:gmatch('[^;]+') do
+ local filename = part:gsub('%?', name)
+ local f = io.open(filename, 'r')
+ if f then
+ f:close()
+ return filename
+ end
+ tried[#tried + 1] = string.format("no file '%s'", filename)
+ end
+ return nil, table.concat(tried, '\n')
+ end
+end
+
+local string_upper = string.upper
+-- Default styles.
+local default = {
+ 'nothing', 'whitespace', 'comment', 'string', 'number', 'keyword',
+ 'identifier', 'operator', 'error', 'preprocessor', 'constant', 'variable',
+ 'function', 'class', 'type', 'label', 'regex', 'embedded'
+}
+for i = 1, #default do
+ local name, upper_name = default[i], string_upper(default[i])
+ M[upper_name], M['STYLE_'..upper_name] = name, '$(style.'..name..')'
+end
+-- Predefined styles.
+local predefined = {
+ 'default', 'linenumber', 'bracelight', 'bracebad', 'controlchar',
+ 'indentguide', 'calltip', 'folddisplaytext'
+}
+for i = 1, #predefined do
+ local name, upper_name = predefined[i], string_upper(predefined[i])
+ M[upper_name], M['STYLE_'..upper_name] = name, '$(style.'..name..')'
+end
+
+---
+-- Adds pattern *rule* identified by string *id* to the ordered list of rules
+-- for lexer *lexer*.
+-- @param lexer The lexer to add the given rule to.
+-- @param id The id associated with this rule. It does not have to be the same
+-- as the name passed to `token()`.
+-- @param rule The LPeg pattern of the rule.
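+-- @usage lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))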
+-- @see modify_rule
+-- @name add_rule
+function M.add_rule(lexer, id, rule)
+ if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent
+ if not lexer._RULES then
+ lexer._RULES = {}
+ -- Contains an ordered list (by numerical index) of rule names. This is used
+ -- in conjunction with lexer._RULES for building _TOKENRULE.
+ lexer._RULEORDER = {}
+ end
+ lexer._RULES[id] = rule
+ lexer._RULEORDER[#lexer._RULEORDER + 1] = id
+ lexer:build_grammar()
+end
+
+---
+-- Replaces in lexer *lexer* the existing rule identified by string *id* with
+-- pattern *rule*.
+-- @param lexer The lexer to modify.
+-- @param id The id associated with this rule.
+-- @param rule The LPeg pattern of the rule.
+-- @name modify_rule
+function M.modify_rule(lexer, id, rule)
+ if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent
+ lexer._RULES[id] = rule
+ lexer:build_grammar()
+end
+
+---
+-- Returns the rule identified by string *id*.
+-- @param lexer The lexer to fetch a rule from.
+-- @param id The id of the rule to fetch.
+-- @return pattern
+-- @name get_rule
+function M.get_rule(lexer, id)
+ if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent
+ return lexer._RULES[id]
+end
+
+---
+-- Associates string *token_name* in lexer *lexer* with Scintilla style string
+-- *style*.
+-- Style strings are comma-separated property settings. Available property
+-- settings are:
+--
+-- * `font:name`: Font name.
+-- * `size:int`: Font size.
+-- * `bold` or `notbold`: Whether or not the font face is bold.
+-- * `weight:int`: Font weight (between 1 and 999).
+-- * `italics` or `notitalics`: Whether or not the font face is italic.
+-- * `underlined` or `notunderlined`: Whether or not the font face is
+-- underlined.
+-- * `fore:color`: Font face foreground color in "#RRGGBB" or 0xBBGGRR format.
+-- * `back:color`: Font face background color in "#RRGGBB" or 0xBBGGRR format.
+-- * `eolfilled` or `noteolfilled`: Whether or not the background color
+-- extends to the end of the line.
+-- * `case:char`: Font case ('u' for uppercase, 'l' for lowercase, and 'm' for
+-- mixed case).
+-- * `visible` or `notvisible`: Whether or not the text is visible.
+-- * `changeable` or `notchangeable`: Whether or not the text is changeable or
+-- read-only.
+--
+-- Property settings may also contain "$(property.name)" expansions for
+-- properties defined in Scintilla, theme files, etc.
+-- @param lexer The lexer to add a style to.
+-- @param token_name The name of the token to associate with the style.
+-- @param style A style string for Scintilla.
+-- @usage lex:add_style('longstring', lexer.STYLE_STRING)
+-- @usage lex:add_style('deprecated_function', lexer.STYLE_FUNCTION..',italics')
+-- @usage lex:add_style('visible_ws',
+-- lexer.STYLE_WHITESPACE..',back:$(color.grey)')
+-- @name add_style
+function M.add_style(lexer, token_name, style)
+ local num_styles = lexer._numstyles
+ if num_styles == 32 then num_styles = num_styles + 8 end -- skip predefined
+ if num_styles >= 255 then print('Too many styles defined (255 MAX)') end
+ lexer._TOKENSTYLES[token_name], lexer._numstyles = num_styles, num_styles + 1
+ lexer._EXTRASTYLES[token_name] = style
+ -- If the lexer is a proxy or a child that embedded itself, copy this style to
+ -- the parent lexer.
+ if lexer._lexer then lexer._lexer:add_style(token_name, style) end
+end
+
+---
+-- Adds to lexer *lexer* a fold point whose beginning and end tokens are string
+-- *token_name* tokens with string content *start_symbol* and *end_symbol*,
+-- respectively.
+-- In the event that *start_symbol* may or may not be a fold point depending on
+-- context, and that additional processing is required, *end_symbol* may be a
+-- function that ultimately returns `1` (indicating a beginning fold point),
+-- `-1` (indicating an ending fold point), or `0` (indicating no fold point).
+-- That function is passed the following arguments:
+--
+-- * `text`: The text being processed for fold points.
+-- * `pos`: The position in *text* of the beginning of the line currently
+-- being processed.
+-- * `line`: The text of the line currently being processed.
+-- * `s`: The position of *start_symbol* in *line*.
+-- * `symbol`: *start_symbol* itself.
+-- @param lexer The lexer to add a fold point to.
+-- @param token_name The token name of text that indicates a fold point.
+-- @param start_symbol The text that indicates the beginning of a fold point.
+-- @param end_symbol Either the text that indicates the end of a fold point, or
+-- a function that returns whether or not *start_symbol* is a beginning fold
+-- point (1), an ending fold point (-1), or not a fold point at all (0).
+-- @usage lex:add_fold_point(lexer.OPERATOR, '{', '}')
+-- @usage lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
+-- @usage lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+-- @usage lex:add_fold_point('custom', function(text, pos, line, s, symbol)
+-- ... end)
+-- @name add_fold_point
+function M.add_fold_point(lexer, token_name, start_symbol, end_symbol)
+ if not lexer._FOLDPOINTS then lexer._FOLDPOINTS = {_SYMBOLS = {}} end
+ local symbols = lexer._FOLDPOINTS._SYMBOLS
+ if not symbols[start_symbol] then
+ symbols[#symbols + 1], symbols[start_symbol] = start_symbol, true
+ end
+ if not lexer._FOLDPOINTS[token_name] then
+ lexer._FOLDPOINTS[token_name] = {}
+ end
+ if type(end_symbol) == 'string' then
+ if not symbols[end_symbol] then
+ symbols[#symbols + 1], symbols[end_symbol] = end_symbol, true
+ end
+ lexer._FOLDPOINTS[token_name][start_symbol] = 1
+ lexer._FOLDPOINTS[token_name][end_symbol] = -1
+ else
+ lexer._FOLDPOINTS[token_name][start_symbol] = end_symbol -- function or int
+ end
+ -- If the lexer is a proxy or a child that embedded itself, copy this fold
+ -- point to the parent lexer.
+ if lexer._lexer then
+ lexer._lexer:add_fold_point(token_name, start_symbol, end_symbol)
+ end
+end
+
+-- (Re)constructs `lexer._TOKENRULE`.
+local function join_tokens(lexer)
+ local patterns, order = lexer._RULES, lexer._RULEORDER
+ local token_rule = patterns[order[1]]
+ for i = 2, #order do token_rule = token_rule + patterns[order[i]] end
+ lexer._TOKENRULE = token_rule + M.token(M.DEFAULT, M.any)
+ return lexer._TOKENRULE
+end
+
+-- Metatable for lexer grammars.
+-- These grammars are just tables ultimately passed to `lpeg.P()`.
+local grammar_mt = {__index = {
+ -- Adds lexer *lexer* and any of its embedded lexers to this grammar.
+ -- @param lexer The lexer to add.
+ add_lexer = function(self, lexer)
+ local token_rule = lexer:join_tokens()
+ for i = 1, #lexer._CHILDREN do
+ local child = lexer._CHILDREN[i]
+ if child._CHILDREN then self:add_lexer(child) end
+ local rules = child._EMBEDDEDRULES[lexer._NAME]
+ local rules_token_rule = self['__'..child._NAME] or rules.token_rule
+ self[child._NAME] = (-rules.end_rule * rules_token_rule)^0 *
+ rules.end_rule^-1 * lpeg_V(lexer._NAME)
+ local embedded_child = '_'..child._NAME
+ self[embedded_child] = rules.start_rule *
+ (-rules.end_rule * rules_token_rule)^0 *
+ rules.end_rule^-1
+ token_rule = lpeg_V(embedded_child) + token_rule
+ end
+ self['__'..lexer._NAME] = token_rule -- can contain embedded lexer rules
+ self[lexer._NAME] = token_rule^0
+ end
+}}
+
+-- (Re)constructs `lexer._GRAMMAR`.
+-- @param initial_rule The name of the rule to start lexing with. The default
+-- value is `lexer._NAME`. Multilang lexers use this to start with a child
+-- rule if necessary.
+local function build_grammar(lexer, initial_rule)
+ if not lexer._RULES then return end
+ if lexer._CHILDREN then
+ if not initial_rule then initial_rule = lexer._NAME end
+ local grammar = setmetatable({initial_rule}, grammar_mt)
+ grammar:add_lexer(lexer)
+ lexer._INITIALRULE = initial_rule
+ lexer._GRAMMAR = lpeg_Ct(lpeg_P(grammar))
+ else
+ lexer._GRAMMAR = lpeg_Ct(lexer:join_tokens()^0)
+ end
+end
+
+---
+-- Embeds child lexer *child* in parent lexer *lexer* using patterns
+-- *start_rule* and *end_rule*, which signal the beginning and end of the
+-- embedded lexer, respectively.
+-- @param lexer The parent lexer.
+-- @param child The child lexer.
+-- @param start_rule The pattern that signals the beginning of the embedded
+-- lexer.
+-- @param end_rule The pattern that signals the end of the embedded lexer.
+-- @usage html:embed(css, css_start_rule, css_end_rule)
+-- @usage html:embed(lex, php_start_rule, php_end_rule) -- from php lexer
+-- @name embed
+function M.embed(lexer, child, start_rule, end_rule)
+ if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent
+ -- Add child rules.
+ if not child._EMBEDDEDRULES then child._EMBEDDEDRULES = {} end
+ if not child._RULES then error('Cannot embed lexer with no rules') end
+ child._EMBEDDEDRULES[lexer._NAME] = {
+ ['start_rule'] = start_rule,
+ token_rule = child:join_tokens(),
+ ['end_rule'] = end_rule
+ }
+ if not lexer._CHILDREN then lexer._CHILDREN = {} end
+ local children = lexer._CHILDREN
+ children[#children + 1] = child
+ -- Add child styles.
+ for token, style in pairs(child._EXTRASTYLES) do
+ lexer:add_style(token, style)
+ end
+ -- Add child fold symbols.
+ if child._FOLDPOINTS then
+ for token_name, symbols in pairs(child._FOLDPOINTS) do
+ if token_name ~= '_SYMBOLS' then
+ for symbol, v in pairs(symbols) do
+ lexer:add_fold_point(token_name, symbol, v)
+ end
+ end
+ end
+ end
+ lexer:build_grammar()
+ child._lexer = lexer -- use parent's tokens if child is embedding itself
+end
+
+---
+-- Lexes a chunk of text *text* (that has an initial style number of
+-- *init_style*) using lexer *lexer*, returning a table of token names and
+-- positions.
+-- @param lexer The lexer to lex text with.
+-- @param text The text in the buffer to lex.
+-- @param init_style The current style. Multiple-language lexers use this to
+-- determine which language to start lexing in.
+-- @return table of token names and positions.
+-- @name lex
+function M.lex(lexer, text, init_style)
+ if not lexer._GRAMMAR then return {M.DEFAULT, #text + 1} end
+ if not lexer._LEXBYLINE then
+ -- For multilang lexers, build a new grammar whose initial_rule is the
+ -- current language.
+ if lexer._CHILDREN then
+ for style, style_num in pairs(lexer._TOKENSTYLES) do
+ if style_num == init_style then
+ local lexer_name = style:match('^(.+)_whitespace') or lexer._NAME
+ if lexer._INITIALRULE ~= lexer_name then
+ lexer:build_grammar(lexer_name)
+ end
+ break
+ end
+ end
+ end
+ return lpeg_match(lexer._GRAMMAR, text)
+ else
+ local tokens = {}
+ local function append(tokens, line_tokens, offset)
+ for i = 1, #line_tokens, 2 do
+ tokens[#tokens + 1] = line_tokens[i]
+ tokens[#tokens + 1] = line_tokens[i + 1] + offset
+ end
+ end
+ local offset = 0
+ local grammar = lexer._GRAMMAR
+ for line in text:gmatch('[^\r\n]*\r?\n?') do
+ local line_tokens = lpeg_match(grammar, line)
+ if line_tokens then append(tokens, line_tokens, offset) end
+ offset = offset + #line
+ -- Use the default style to the end of the line if none was specified.
+ if tokens[#tokens] ~= offset then
+ tokens[#tokens + 1], tokens[#tokens + 2] = 'default', offset + 1
+ end
+ end
+ return tokens
+ end
+end
+
+---
+-- Determines fold points in a chunk of text *text* using lexer *lexer*,
+-- returning a table of fold levels associated with line numbers.
+-- *text* starts at position *start_pos* on line number *start_line* with a
+-- beginning fold level of *start_level* in the buffer.
+-- @param lexer The lexer to fold text with.
+-- @param text The text in the buffer to fold.
+-- @param start_pos The position in the buffer *text* starts at, starting at
+-- zero.
+-- @param start_line The line number *text* starts on.
+-- @param start_level The fold level *text* starts on.
+-- @return table of fold levels associated with line numbers.
+-- @name fold
+function M.fold(lexer, text, start_pos, start_line, start_level)
+ local folds = {}
+ if text == '' then return folds end
+ local fold = M.property_int['fold'] > 0
+ local FOLD_BASE = M.FOLD_BASE
+ local FOLD_HEADER, FOLD_BLANK = M.FOLD_HEADER, M.FOLD_BLANK
+ if fold and lexer._FOLDPOINTS then
+ local lines = {}
+ for p, l in (text..'\n'):gmatch('()(.-)\r?\n') do
+ lines[#lines + 1] = {p, l}
+ end
+ local fold_zero_sum_lines = M.property_int['fold.on.zero.sum.lines'] > 0
+ local fold_compact = M.property_int['fold.compact'] > 0
+ local fold_points = lexer._FOLDPOINTS
+ local fold_point_symbols = fold_points._SYMBOLS
+ local style_at, fold_level = M.style_at, M.fold_level
+ local line_num, prev_level = start_line, start_level
+ local current_level = prev_level
+ for i = 1, #lines do
+ local pos, line = lines[i][1], lines[i][2]
+ if line ~= '' then
+ if lexer._CASEINSENSITIVEFOLDPOINTS then line = line:lower() end
+ local level_decreased = false
+ for j = 1, #fold_point_symbols do
+ local symbol = fold_point_symbols[j]
+ local word = not symbol:find('[^%w_]')
+ local s, e = line:find(symbol, 1, true)
+ while s and e do
+ --if not word or line:find('^%f[%w_]'..symbol..'%f[^%w_]', s) then
+ if not word or not ((s > 1 and line:find('^[%w_]', s - 1)) or
+ line:find('^[%w_]', e + 1)) then
+ local symbols = fold_points[style_at[start_pos + pos + s - 1]]
+ local level = symbols and symbols[symbol]
+ if type(level) == 'function' then
+ level = level(text, pos, line, s, symbol)
+ end
+ if type(level) == 'number' then
+ current_level = current_level + level
+ if level < 0 and current_level < prev_level then
+ -- Potential zero-sum line. If the level were to go back up on
+ -- the same line, the line may be marked as a fold header.
+ level_decreased = true
+ end
+ end
+ end
+ s = line:find(symbol, s + 1, true)
+ end
+ end
+ folds[line_num] = prev_level
+ if current_level > prev_level then
+ folds[line_num] = prev_level + FOLD_HEADER
+ elseif level_decreased and current_level == prev_level and
+ fold_zero_sum_lines then
+ if line_num > start_line then
+ folds[line_num] = prev_level - 1 + FOLD_HEADER
+ else
+ -- Typing within a zero-sum line.
+ local level = fold_level[line_num - 1] - 1
+ if level > FOLD_HEADER then level = level - FOLD_HEADER end
+ if level > FOLD_BLANK then level = level - FOLD_BLANK end
+ folds[line_num] = level + FOLD_HEADER
+ current_level = current_level + 1
+ end
+ end
+ if current_level < FOLD_BASE then current_level = FOLD_BASE end
+ prev_level = current_level
+ else
+ folds[line_num] = prev_level + (fold_compact and FOLD_BLANK or 0)
+ end
+ line_num = line_num + 1
+ end
+ elseif fold and (lexer._FOLDBYINDENTATION or
+ M.property_int['fold.by.indentation'] > 0) then
+ -- Indentation based folding.
+ -- Calculate indentation per line.
+ local indentation = {}
+ for indent, line in (text..'\n'):gmatch('([\t ]*)([^\r\n]*)\r?\n') do
+ indentation[#indentation + 1] = line ~= '' and #indent
+ end
+ -- Find the first non-blank line before start_line. If the current line is
+ -- indented, make that previous line a header and update the levels of any
+ -- blank lines in between. If the current line is blank, match the level of
+ -- the previous non-blank line.
+ local current_level = start_level
+ for i = start_line - 1, 0, -1 do
+ local level = M.fold_level[i]
+ if level >= FOLD_HEADER then level = level - FOLD_HEADER end
+ if level < FOLD_BLANK then
+ local indent = M.indent_amount[i]
+ if indentation[1] and indentation[1] > indent then
+ folds[i] = FOLD_BASE + indent + FOLD_HEADER
+ for j = i + 1, start_line - 1 do
+ folds[j] = start_level + FOLD_BLANK
+ end
+ elseif not indentation[1] then
+ current_level = FOLD_BASE + indent
+ end
+ break
+ end
+ end
+ -- Iterate over lines, setting fold numbers and fold flags.
+ for i = 1, #indentation do
+ if indentation[i] then
+ current_level = FOLD_BASE + indentation[i]
+ folds[start_line + i - 1] = current_level
+ for j = i + 1, #indentation do
+ if indentation[j] then
+ if FOLD_BASE + indentation[j] > current_level then
+ folds[start_line + i - 1] = current_level + FOLD_HEADER
+ current_level = FOLD_BASE + indentation[j] -- for any blanks below
+ end
+ break
+ end
+ end
+ else
+ folds[start_line + i - 1] = current_level + FOLD_BLANK
+ end
+ end
+ else
+ -- No folding, reset fold levels if necessary.
+ local current_line = start_line
+ for _ in text:gmatch('\r?\n') do
+ folds[current_line] = start_level
+ current_line = current_line + 1
+ end
+ end
+ return folds
+end
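+
+-- As a rough illustration (assuming a lexer that defines 'if'/'end' keyword
+-- fold points, that folding is enabled, and that the text has already been
+-- styled), folding the two-line text "if x then\nend" starting at line 0 with
+-- level `M.FOLD_BASE` would yield something like:
+--
+--     {[0] = M.FOLD_BASE + M.FOLD_HEADER, [1] = M.FOLD_BASE + 1}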
+
+---
+-- Creates and returns a new lexer with the given name.
+-- @param name The lexer's name.
+-- @param opts Table of lexer options. Options currently supported:
+-- * `lex_by_line`: Whether or not the lexer only processes whole lines of
+-- text (instead of arbitrary chunks of text) at a time.
+-- Line lexers cannot look ahead to subsequent lines.
+-- The default value is `false`.
+-- * `fold_by_indentation`: Whether or not the lexer defines no fold points of
+-- its own, in which case fold points are calculated based on changes in line
+-- indentation.
+-- The default value is `false`.
+-- * `case_insensitive_fold_points`: Whether or not fold points added via
+-- `lexer.add_fold_point()` ignore case.
+-- The default value is `false`.
+-- * `inherit`: Lexer to inherit from.
+-- The default value is `nil`.
+-- @usage lexer.new('rhtml', {inherit = lexer.load('html')})
+-- @name new
+function M.new(name, opts)
+ local lexer = {
+ _NAME = assert(name, 'lexer name expected'),
+ _LEXBYLINE = opts and opts['lex_by_line'],
+ _FOLDBYINDENTATION = opts and opts['fold_by_indentation'],
+ _CASEINSENSITIVEFOLDPOINTS = opts and opts['case_insensitive_fold_points'],
+ _lexer = opts and opts['inherit']
+ }
+
+ -- Create the initial maps for token names to style numbers and styles.
+ local token_styles = {}
+ for i = 1, #default do token_styles[default[i]] = i - 1 end
+ for i = 1, #predefined do token_styles[predefined[i]] = i + 31 end
+ lexer._TOKENSTYLES, lexer._numstyles = token_styles, #default
+ lexer._EXTRASTYLES = {}
+
+ return setmetatable(lexer, {__index = {
+ add_rule = M.add_rule, modify_rule = M.modify_rule, get_rule = M.get_rule,
+ add_style = M.add_style, add_fold_point = M.add_fold_point,
+ join_tokens = join_tokens, build_grammar = build_grammar, embed = M.embed,
+ lex = M.lex, fold = M.fold
+ }})
+end
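+
+-- As a brief sketch of how the pieces above fit together (the language name,
+-- keywords, and options below are purely illustrative):
+--
+--     local lexer = require('lexer')
+--     local token = lexer.token
+--     local lex = lexer.new('mylang', {fold_by_indentation = true})
+--     lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+--     lex:add_rule('keyword', token(lexer.KEYWORD, lexer.word_match[[if else end]]))
+--     lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+--     return lex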
+
+-- Legacy support for older lexers.
+-- Processes the `lex._rules`, `lex._tokenstyles`, and `lex._foldsymbols`
+-- tables.
+-- Since legacy lexers may be processed up to twice, ensure their default styles
+-- and rules are not processed more than once.
+local function process_legacy_lexer(lexer)
+ local function warn(msg) --[[io.stderr:write(msg, "\n")]] end
+ if not lexer._LEGACY then
+ lexer._LEGACY = true
+ warn("lexers as tables are deprecated; use 'lexer.new()'")
+ local token_styles = {}
+ for i = 1, #default do token_styles[default[i]] = i - 1 end
+ for i = 1, #predefined do token_styles[predefined[i]] = i + 31 end
+ lexer._TOKENSTYLES, lexer._numstyles = token_styles, #default
+ lexer._EXTRASTYLES = {}
+ setmetatable(lexer, getmetatable(M.new('')))
+ if lexer._rules then
+ warn("lexer '_rules' table is deprecated; use 'add_rule()'")
+ for i = 1, #lexer._rules do
+ lexer:add_rule(lexer._rules[i][1], lexer._rules[i][2])
+ end
+ end
+ end
+ if lexer._tokenstyles then
+ warn("lexer '_tokenstyles' table is deprecated; use 'add_style()'")
+ for token, style in pairs(lexer._tokenstyles) do
+ -- If this legacy lexer is being processed a second time, only add styles
+ -- added since the first processing.
+ if not lexer._TOKENSTYLES[token] then lexer:add_style(token, style) end
+ end
+ end
+ if lexer._foldsymbols then
+ warn("lexer '_foldsymbols' table is deprecated; use 'add_fold_point()'")
+ for token_name, symbols in pairs(lexer._foldsymbols) do
+ if type(symbols) == 'table' and token_name ~= '_patterns' then
+ for symbol, v in pairs(symbols) do
+ lexer:add_fold_point(token_name, symbol, v)
+ end
+ end
+ end
+ if lexer._foldsymbols._case_insensitive then
+ lexer._CASEINSENSITIVEFOLDPOINTS = true
+ end
+ end
+end
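+
+-- For reference, a legacy lexer of the kind processed above is a plain table
+-- along these lines (an illustrative example, not a lexer from the
+-- distribution):
+--
+--     local M = {_NAME = 'mylegacy'}
+--     M._rules = {
+--       {'whitespace', token(l.WHITESPACE, l.space^1)},
+--       {'keyword', token(l.KEYWORD, word_match{'if', 'else', 'end'})},
+--     }
+--     M._tokenstyles = {custom_token = l.STYLE_KEYWORD..',bold'}
+--     M._foldsymbols = {
+--       _patterns = {'%l+'},
+--       [l.KEYWORD] = {['if'] = 1, ['end'] = -1}
+--     }
+--     return M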
+
+local lexers = {} -- cache of loaded lexers
+---
+-- Initializes or loads and returns the lexer of string name *name*.
+-- Scintilla calls this function in order to load a lexer. Parent lexers also
+-- call this function in order to load child lexers and vice-versa. The user
+-- calls this function in order to load a lexer when using this module as a Lua
+-- library.
+-- @param name The name of the lexing language.
+-- @param alt_name The alternate name of the lexing language. This is useful for
+-- embedding the same child lexer with multiple sets of start and end tokens.
+-- @param cache Flag indicating whether or not to load lexers from the cache.
+-- This should only be `true` when initially loading a lexer (e.g. not from
+-- within another lexer for embedding purposes).
+-- The default value is `false`.
+-- @return lexer object
+-- @name load
+function M.load(name, alt_name, cache)
+ if cache and lexers[alt_name or name] then return lexers[alt_name or name] end
+
+ -- When using this module as a stand-alone module, the `property` and
+ -- `property_int` tables do not exist (they are not useful). Create them in
+ -- order to prevent errors from occurring.
+ if not M.property then
+ M.property, M.property_int = {}, setmetatable({}, {
+ __index = function(t, k) return tonumber(M.property[k]) or 0 end,
+ __newindex = function() error('read-only property') end
+ })
+ end
+
+ -- Load the language lexer with its rules, styles, etc.
+ -- However, replace the default `WHITESPACE` style name with a unique
+ -- whitespace style name (and then automatically add it afterwards), since
+ -- embedded lexing relies on these unique whitespace style names. Note that
+ -- loading embedded lexers changes `WHITESPACE` again, so when adding it
+ -- later, do not reference the potentially incorrect value.
+ M.WHITESPACE = (alt_name or name)..'_whitespace'
+ local lexer = dofile(assert(package.searchpath(name, M.path)))
+ assert(lexer, string.format("'%s.lua' did not return a lexer", name))
+ if alt_name then lexer._NAME = alt_name end
+ if not getmetatable(lexer) or lexer._LEGACY then
+ -- A legacy lexer may need to be processed a second time in order to pick up
+ -- any `_tokenstyles` or `_foldsymbols` added after `lexer.embed_lexer()`.
+ process_legacy_lexer(lexer)
+ if lexer._lexer and lexer._lexer._LEGACY then
+ process_legacy_lexer(lexer._lexer) -- mainly for `_foldsymbols` edits
+ end
+ end
+ lexer:add_style((alt_name or name)..'_whitespace', M.STYLE_WHITESPACE)
+
+ -- If the lexer is a proxy or a child that embedded itself, set the parent to
+ -- be the main lexer.
+ if lexer._lexer then
+ lexer = lexer._lexer
+ lexer._NAME = alt_name or name
+ end
+
+ if cache then lexers[alt_name or name] = lexer end
+ return lexer
+end
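+
+-- For example, when using this module as a stand-alone Lua library (a sketch;
+-- the 'lua' lexer is assumed to be on `M.path` and the sample source string is
+-- illustrative only):
+--
+--     local lexer = require('lexer')
+--     local lua = lexer.load('lua', nil, true)
+--     local tokens = lua:lex('print("hello")')
+--     for i = 1, #tokens, 2 do
+--       print(tokens[i], tokens[i + 1]) -- token name, position after token
+--     end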
+
+-- The following are utility functions lexers will have access to.
+
+-- Common patterns.
+M.any = lpeg_P(1)
+M.ascii = lpeg_R('\000\127')
+M.extend = lpeg_R('\000\255')
+M.alpha = lpeg_R('AZ', 'az')
+M.digit = lpeg_R('09')
+M.alnum = lpeg_R('AZ', 'az', '09')
+M.lower = lpeg_R('az')
+M.upper = lpeg_R('AZ')
+M.xdigit = lpeg_R('09', 'AF', 'af')
+M.cntrl = lpeg_R('\000\031')
+M.graph = lpeg_R('!~')
+M.print = lpeg_R(' ~')
+M.punct = lpeg_R('!/', ':@', '[\'', '{~')
+M.space = lpeg_S('\t\v\f\n\r ')
+
+M.newline = lpeg_S('\r\n\f')^1
+M.nonnewline = 1 - M.newline
+M.nonnewline_esc = 1 - (M.newline + '\\') + '\\' * M.any
+
+M.dec_num = M.digit^1
+M.hex_num = '0' * lpeg_S('xX') * M.xdigit^1
+M.oct_num = '0' * lpeg_R('07')^1
+M.integer = lpeg_S('+-')^-1 * (M.hex_num + M.oct_num + M.dec_num)
+M.float = lpeg_S('+-')^-1 *
+ ((M.digit^0 * '.' * M.digit^1 + M.digit^1 * '.' * M.digit^0) *
+ (lpeg_S('eE') * lpeg_S('+-')^-1 * M.digit^1)^-1 +
+ (M.digit^1 * lpeg_S('eE') * lpeg_S('+-')^-1 * M.digit^1))
+
+M.word = (M.alpha + '_') * (M.alnum + '_')^0
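+
+-- These primitives compose into language-specific patterns. For example
+-- (purely illustrative), a binary number literal and a combined number
+-- pattern could be written as:
+--
+--     local bin_num = '0' * lpeg_S('bB') * lpeg_S('01')^1
+--     local number = M.float + M.hex_num + M.oct_num + bin_num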
+
+---
+-- Creates and returns a token pattern with token name *name* and pattern
+-- *patt*.
+-- If *name* is not a predefined token name, its style must be defined via
+-- `lexer.add_style()`.
+-- @param name The name of the token. If this name is not a predefined token
+-- name, then a style needs to be associated with it via `lexer.add_style()`.
+-- @param patt The LPeg pattern associated with the token.
+-- @return pattern
+-- @usage local ws = token(lexer.WHITESPACE, lexer.space^1)
+-- @usage local annotation = token('annotation', '@' * lexer.word)
+-- @name token
+function M.token(name, patt)
+ return lpeg_Cc(name) * patt * lpeg_Cp()
+end
+
+---
+-- Creates and returns a pattern that matches a range of text bounded by
+-- *chars* characters.
+-- This is a convenience function for matching more complicated delimited ranges
+-- like strings with escape characters and balanced parentheses. *single_line*
+-- indicates whether or not the range must be on a single line, *no_escape*
+-- indicates whether or not to ignore '\' as an escape character, and *balanced*
+-- indicates whether or not to handle balanced ranges like parentheses and
+-- requires *chars* to be composed of two characters.
+-- @param chars The character(s) that bound the matched range.
+-- @param single_line Optional flag indicating whether or not the range must be
+-- on a single line.
+-- @param no_escape Optional flag indicating whether or not the range end
+-- character may be escaped by a '\\' character.
+-- @param balanced Optional flag indicating whether or not to match a balanced
+-- range, like the "%b" Lua pattern. This flag only applies if *chars*
+-- consists of two different characters (e.g. "()").
+-- @return pattern
+-- @usage local dq_str_escapes = lexer.delimited_range('"')
+-- @usage local dq_str_noescapes = lexer.delimited_range('"', false, true)
+-- @usage local unbalanced_parens = lexer.delimited_range('()')
+-- @usage local balanced_parens = lexer.delimited_range('()', false, false,
+-- true)
+-- @see nested_pair
+-- @name delimited_range
+function M.delimited_range(chars, single_line, no_escape, balanced)
+ local s = chars:sub(1, 1)
+ local e = #chars == 2 and chars:sub(2, 2) or s
+ local range
+ local b = balanced and s or ''
+ local n = single_line and '\n' or ''
+ if no_escape then
+ local invalid = lpeg_S(e..n..b)
+ range = M.any - invalid
+ else
+ local invalid = lpeg_S(e..n..b) + '\\'
+ range = M.any - invalid + '\\' * M.any
+ end
+ if balanced and s ~= e then
+ return lpeg_P{s * (range + lpeg_V(1))^0 * e}
+ else
+ return s * range^0 * lpeg_P(e)^-1
+ end
+end
+
+---
+-- Creates and returns a pattern that matches pattern *patt* only at the
+-- beginning of a line.
+-- @param patt The LPeg pattern to match on the beginning of a line.
+-- @return pattern
+-- @usage local preproc = token(lexer.PREPROCESSOR, lexer.starts_line('#') *
+-- lexer.nonnewline^0)
+-- @name starts_line
+function M.starts_line(patt)
+ return lpeg_Cmt(lpeg_C(patt), function(input, index, match, ...)
+ local pos = index - #match
+ if pos == 1 then return index, ... end
+ local char = input:sub(pos - 1, pos - 1)
+ if char == '\n' or char == '\r' or char == '\f' then return index, ... end
+ end)
+end
+
+---
+-- Creates and returns a pattern that verifies that string set *s* contains the
+-- first non-whitespace character behind the current match position.
+-- @param s String character set like one passed to `lpeg.S()`.
+-- @return pattern
+-- @usage local regex = lexer.last_char_includes('+-*!%^&|=,([{') *
+-- lexer.delimited_range('/')
+-- @name last_char_includes
+function M.last_char_includes(s)
+ s = '['..s:gsub('[-%%%[]', '%%%1')..']'
+ return lpeg_P(function(input, index)
+ if index == 1 then return index end
+ local i = index
+ while input:sub(i - 1, i - 1):match('[ \t\r\n\f]') do i = i - 1 end
+ if input:sub(i - 1, i - 1):match(s) then return index end
+ end)
+end
+
+---
+-- Returns a pattern that matches a balanced range of text that starts with
+-- string *start_chars* and ends with string *end_chars*.
+-- With single-character delimiters, this function is identical to
+-- `delimited_range(start_chars..end_chars, false, true, true)`.
+-- @param start_chars The string starting a nested sequence.
+-- @param end_chars The string ending a nested sequence.
+-- @return pattern
+-- @usage local nested_comment = lexer.nested_pair('/*', '*/')
+-- @see delimited_range
+-- @name nested_pair
+function M.nested_pair(start_chars, end_chars)
+ local s, e = start_chars, lpeg_P(end_chars)^-1
+ return lpeg_P{s * (M.any - s - end_chars + lpeg_V(1))^0 * e}
+end
+
+---
+-- Creates and returns a pattern that matches any single word in string *words*.
+-- *case_insensitive* indicates whether or not to ignore case when matching
+-- words.
+-- This is a convenience function for simplifying a set of ordered choice word
+-- patterns.
+-- If *words* is a multi-line string, it may contain Lua line comments (`--`)
+-- that will ultimately be ignored.
+-- @param words A string list of words separated by spaces.
+-- @param case_insensitive Optional boolean flag indicating whether or not the
+-- word match is case-insensitive. The default value is `false`.
+-- @param word_chars Unused legacy parameter.
+-- @return pattern
+-- @usage local keyword = token(lexer.KEYWORD, word_match[[foo bar baz]])
+-- @usage local keyword = token(lexer.KEYWORD, word_match([[foo-bar foo-baz
+-- bar-foo bar-baz baz-foo baz-bar]], true))
+-- @name word_match
+function M.word_match(words, case_insensitive, word_chars)
+ local word_list = {}
+ if type(words) == 'table' then
+ -- Legacy `word_match(word_list, word_chars, case_insensitive)` form.
+ words = table.concat(words, ' ')
+ word_chars, case_insensitive = case_insensitive, word_chars
+ end
+ for word in words:gsub('%-%-[^\n]+', ''):gmatch('%S+') do
+ word_list[case_insensitive and word:lower() or word] = true
+ for char in word:gmatch('[^%w_]') do
+ if not (word_chars or ''):find(char, 1, true) then
+ word_chars = (word_chars or '')..char
+ end
+ end
+ end
+ local chars = M.alnum + '_'
+ if (word_chars or '') ~= '' then chars = chars + lpeg_S(word_chars) end
+ return lpeg_Cmt(chars^1, function(input, index, word)
+ if case_insensitive then word = word:lower() end
+ return word_list[word] and index or nil
+ end)
+end
+
+-- Deprecated legacy function. Use `parent:embed()` instead.
+-- Embeds child lexer *child* in parent lexer *parent* using patterns
+-- *start_rule* and *end_rule*, which signal the beginning and end of the
+-- embedded lexer, respectively.
+-- @param parent The parent lexer.
+-- @param child The child lexer.
+-- @param start_rule The pattern that signals the beginning of the embedded
+-- lexer.
+-- @param end_rule The pattern that signals the end of the embedded lexer.
+-- @usage lexer.embed_lexer(M, css, css_start_rule, css_end_rule)
+-- @usage lexer.embed_lexer(html, M, php_start_rule, php_end_rule)
+-- @usage lexer.embed_lexer(html, ruby, ruby_start_rule, ruby_end_rule)
+-- @see embed
+-- @name embed_lexer
+function M.embed_lexer(parent, child, start_rule, end_rule)
+ if not getmetatable(parent) then process_legacy_lexer(parent) end
+ if not getmetatable(child) then process_legacy_lexer(child) end
+ parent:embed(child, start_rule, end_rule)
+end
+
+-- Determines if the previous line is a comment.
+-- This is used for determining if the current comment line is a fold point.
+-- @param prefix The prefix string defining a comment.
+-- @param text The text passed to a fold function.
+-- @param pos The pos passed to a fold function.
+-- @param line The line passed to a fold function.
+-- @param s The s passed to a fold function.
+local function prev_line_is_comment(prefix, text, pos, line, s)
+ local start = line:find('%S')
+ if start < s and not line:find(prefix, start, true) then return false end
+ local p = pos - 1
+ if text:sub(p, p) == '\n' then
+ p = p - 1
+ if text:sub(p, p) == '\r' then p = p - 1 end
+ if text:sub(p, p) ~= '\n' then
+ while p > 1 and text:sub(p - 1, p - 1) ~= '\n' do p = p - 1 end
+ while text:sub(p, p):find('^[\t ]$') do p = p + 1 end
+ return text:sub(p, p + #prefix - 1) == prefix
+ end
+ end
+ return false
+end
+
+-- Determines if the next line is a comment.
+-- This is used for determining if the current comment line is a fold point.
+-- @param prefix The prefix string defining a comment.
+-- @param text The text passed to a fold function.
+-- @param pos The pos passed to a fold function.
+-- @param line The line passed to a fold function.
+-- @param s The s passed to a fold function.
+local function next_line_is_comment(prefix, text, pos, line, s)
+ local p = text:find('\n', pos + s)
+ if p then
+ p = p + 1
+ while text:sub(p, p):find('^[\t ]$') do p = p + 1 end
+ return text:sub(p, p + #prefix - 1) == prefix
+ end
+ return false
+end
+
+---
+-- Returns a fold function (to be passed to `lexer.add_fold_point()`) that folds
+-- consecutive line comments that start with string *prefix*.
+-- @param prefix The prefix string defining a line comment.
+-- @usage lex:add_fold_point(lexer.COMMENT, '--',
+-- lexer.fold_line_comments('--'))
+-- @usage lex:add_fold_point(lexer.COMMENT, '//',
+-- lexer.fold_line_comments('//'))
+-- @name fold_line_comments
+function M.fold_line_comments(prefix)
+ local property_int = M.property_int
+ return function(text, pos, line, s)
+ if property_int['fold.line.comments'] == 0 then return 0 end
+ if s > 1 and line:match('^%s*()') < s then return 0 end
+ local prev_line_comment = prev_line_is_comment(prefix, text, pos, line, s)
+ local next_line_comment = next_line_is_comment(prefix, text, pos, line, s)
+ if not prev_line_comment and next_line_comment then return 1 end
+ if prev_line_comment and not next_line_comment then return -1 end
+ return 0
+ end
+end
+
+M.property_expanded = setmetatable({}, {
+ -- Returns the string property value associated with string property *key*,
+ -- replacing any "$()" and "%()" expressions with the values of their keys.
+ __index = function(t, key)
+ return M.property[key]:gsub('[$%%]%b()', function(key)
+ return t[key:sub(3, -2)]
+ end)
+ end,
+ __newindex = function() error('read-only property') end
+})
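+
+-- For example (keys and values are illustrative only):
+--
+--     M.property['color.red'] = '#FF0000'
+--     M.property['style.string'] = 'fore:$(color.red)'
+--     print(M.property_expanded['style.string']) --> fore:#FF0000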
+
+--[[ The functions and fields below were defined in C.
+
+---
+-- Returns the line number of the line that contains position *pos*, which
+-- starts from 1.
+-- @param pos The position to get the line number of.
+-- @return number
+local function line_from_position(pos) end
+]]
+
+return M
diff --git a/lexlua/lexer2.lua b/lexlua/lexer2.lua
new file mode 100644
index 000000000..b32240aab
--- /dev/null
+++ b/lexlua/lexer2.lua
@@ -0,0 +1,1723 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+
+local M = {}
+
+--[=[ This comment is for LuaDoc.
+---
+-- Lexes Scintilla documents and source code with Lua and LPeg.
+--
+-- ## Overview
+--
+-- Lexers highlight the syntax of source code. Scintilla (the editing component
+-- behind [Textadept][] and [SciTE][]) traditionally uses static, compiled C++
+-- lexers which are notoriously difficult to create and/or extend. On the other
+-- hand, Lua makes it easy to to rapidly create new lexers, extend existing
+-- ones, and embed lexers within one another. Lua lexers tend to be more
+-- readable than C++ lexers too.
+--
+-- Lexers are Parsing Expression Grammars, or PEGs, composed with the Lua
+-- [LPeg library][]. The following table comes from the LPeg documentation and
+-- summarizes all you need to know about constructing basic LPeg patterns. This
+-- module provides convenience functions for creating and working with other
+-- more advanced patterns and concepts.
+--
+-- Operator | Description
+-- ---------------------|------------
+-- `lpeg.P(string)` | Matches `string` literally.
+-- `lpeg.P(`_`n`_`)` | Matches exactly _`n`_ characters.
+-- `lpeg.S(string)` | Matches any character in set `string`.
+-- `lpeg.R("`_`xy`_`")` | Matches any character between range `x` and `y`.
+-- `patt^`_`n`_ | Matches at least _`n`_ repetitions of `patt`.
+-- `patt^-`_`n`_ | Matches at most _`n`_ repetitions of `patt`.
+-- `patt1 * patt2` | Matches `patt1` followed by `patt2`.
+-- `patt1 + patt2` | Matches `patt1` or `patt2` (ordered choice).
+-- `patt1 - patt2` | Matches `patt1` if `patt2` does not match.
+-- `-patt` | Equivalent to `("" - patt)`.
+-- `#patt` | Matches `patt` but consumes no input.
+--
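+-- For instance, a pattern for a typical identifier (letters, digits, and
+-- underscores, not starting with a digit) could be composed from these
+-- operators as follows (an illustrative snippet, not from any shipped lexer):
+--
+--     local identifier = (lpeg.R('AZ', 'az') + '_') *
+--       (lpeg.R('AZ', 'az', '09') + '_')^0
+--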
+-- The first part of this document deals with rapidly constructing a simple
+-- lexer. The next part deals with more advanced techniques, such as custom
+-- coloring and embedding lexers within one another. Following that is a
+-- discussion about code folding, or being able to tell Scintilla which code
+-- blocks are "foldable" (temporarily hideable from view). After that are
+-- instructions on how to use LPeg lexers with the aforementioned Textadept and
+-- SciTE editors. Finally there are comments on lexer performance and
+-- limitations.
+--
+-- [LPeg library]: http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html
+-- [Textadept]: http://foicica.com/textadept
+-- [SciTE]: http://scintilla.org/SciTE.html
+--
+-- ## Lexer Basics
+--
+-- The *lexers/* directory contains all lexers, including your new one. Before
+-- attempting to write one from scratch though, first determine if your
+-- programming language is similar to any of the 100+ languages supported. If
+-- so, you may be able to copy and modify that lexer, saving some time and
+-- effort. The filename of your lexer should be the name of your programming
+-- language in lower case followed by a *.lua* extension. For example, a new Lua
+-- lexer has the name *lua.lua*.
+--
+-- Note: Try to refrain from using one-character language names like "c", "d",
+-- or "r". For example, Scintillua uses "ansi_c", "dmd", and "rstats",
+-- respectively.
+--
+-- ### New Lexer Template
+--
+-- There is a *lexers/template.txt* file that contains a simple template for a
+-- new lexer. Feel free to use it, replacing the '?'s with the name of your
+-- lexer:
+--
+-- -- ? LPeg lexer.
+--
+-- local l = require('lexer')
+-- local token, word_match = l.token, l.word_match
+-- local P, R, S = lpeg.P, lpeg.R, lpeg.S
+--
+-- local lexer = l.new('?')
+--
+-- -- Whitespace.
+-- local ws = token(l.WHITESPACE, l.space^1)
+-- lexer:add_rule('whitespace', ws)
+--
+-- return lexer
+--
+-- The first 3 lines of code simply define often used convenience variables. The
+-- fourth and last lines [define](#lexer.new) and return the lexer object
+-- Scintilla uses; they are very important and must be part of every lexer. The
+-- fifth line defines something called a "token", an essential building block of
+-- lexers. You will learn about tokens shortly. The sixth line defines a lexer
+-- grammar rule, which you will learn about later, as well as token styles.
+-- Note, however, the `local` prefix in front of variables, which is needed
+-- so as not to affect Lua's global environment. All in all, this is a minimal,
+-- working lexer that you can build on.
+--
+-- ### Tokens
+--
+-- Take a moment to think about your programming language's structure. What kind
+-- of key elements does it have? In the template shown earlier, one predefined
+-- element all languages have is whitespace. Your language probably also has
+-- elements like comments, strings, and keywords. Lexers refer to these elements
+-- as "tokens". Tokens are the fundamental "building blocks" of lexers. Lexers
+-- break down source code into tokens for coloring, which results in the syntax
+-- highlighting familiar to you. It is up to you how specific your lexer is when
+-- it comes to tokens. Perhaps only distinguishing between keywords and
+-- identifiers is necessary, or maybe recognizing constants and built-in
+-- functions, methods, or libraries is desirable. The Lua lexer, for example,
+-- defines 11 tokens: whitespace, keywords, built-in functions, constants,
+-- built-in libraries, identifiers, strings, comments, numbers, labels, and
+-- operators. Even though constants, built-in functions, and built-in libraries
+-- are subsets of identifiers, Lua programmers find it helpful for the lexer to
+-- distinguish between them all. It is perfectly acceptable to just recognize
+-- keywords and identifiers.
+--
+-- In a lexer, tokens consist of a token name and an LPeg pattern that matches a
+-- sequence of characters recognized as an instance of that token. Create tokens
+-- using the [`lexer.token()`]() function. Let us examine the "whitespace" token
+-- defined in the template shown earlier:
+--
+-- local ws = token(l.WHITESPACE, l.space^1)
+--
+-- At first glance, the first argument does not appear to be a string name and
+-- the second argument does not appear to be an LPeg pattern. Perhaps you
+-- expected something like:
+--
+-- local ws = token('whitespace', S('\t\v\f\n\r ')^1)
+--
+-- The `lexer` (`l`) module actually provides a convenient list of common token
+-- names and common LPeg patterns for you to use. Token names include
+-- [`lexer.DEFAULT`](), [`lexer.WHITESPACE`](), [`lexer.COMMENT`](),
+-- [`lexer.STRING`](), [`lexer.NUMBER`](), [`lexer.KEYWORD`](),
+-- [`lexer.IDENTIFIER`](), [`lexer.OPERATOR`](), [`lexer.ERROR`](),
+-- [`lexer.PREPROCESSOR`](), [`lexer.CONSTANT`](), [`lexer.VARIABLE`](),
+-- [`lexer.FUNCTION`](), [`lexer.CLASS`](), [`lexer.TYPE`](), [`lexer.LABEL`](),
+-- [`lexer.REGEX`](), and [`lexer.EMBEDDED`](). Patterns include
+-- [`lexer.any`](), [`lexer.ascii`](), [`lexer.extend`](), [`lexer.alpha`](),
+-- [`lexer.digit`](), [`lexer.alnum`](), [`lexer.lower`](), [`lexer.upper`](),
+-- [`lexer.xdigit`](), [`lexer.cntrl`](), [`lexer.graph`](), [`lexer.print`](),
+-- [`lexer.punct`](), [`lexer.space`](), [`lexer.newline`](),
+-- [`lexer.nonnewline`](), [`lexer.nonnewline_esc`](), [`lexer.dec_num`](),
+-- [`lexer.hex_num`](), [`lexer.oct_num`](), [`lexer.integer`](),
+-- [`lexer.float`](), and [`lexer.word`](). You may use your own token names if
+-- none of the above fit your language, but an advantage to using predefined
+-- token names is that your lexer's tokens will inherit the universal syntax
+-- highlighting color theme used by your text editor.
+--
+-- #### Example Tokens
+--
+-- So, how might you define other tokens like keywords, comments, and strings?
+-- Here are some examples.
+--
+-- **Keywords**
+--
+-- Instead of matching _n_ keywords with _n_ `P('keyword_`_`n`_`')` ordered
+-- choices, use another convenience function: [`lexer.word_match()`](). It is
+-- much easier and more efficient to write word matches like:
+--
+-- local keyword = token(l.KEYWORD, l.word_match[[
+-- keyword_1 keyword_2 ... keyword_n
+-- ]])
+--
+-- local case_insensitive_keyword = token(l.KEYWORD, l.word_match([[
+-- KEYWORD_1 keyword_2 ... KEYword_n
+-- ]], true))
+--
+-- local hyphened_keyword = token(l.KEYWORD, l.word_match[[
+-- keyword-1 keyword-2 ... keyword-n
+-- ]])
+--
+-- **Comments**
+--
+-- Line-style comments with a prefix character(s) are easy to express with LPeg:
+--
+-- local shell_comment = token(l.COMMENT, '#' * l.nonnewline^0)
+-- local c_line_comment = token(l.COMMENT, '//' * l.nonnewline_esc^0)
+--
+-- The comments above start with a '#' or "//" and go to the end of the line.
+-- The second comment recognizes the next line also as a comment if the current
+-- line ends with a '\' escape character.
+--
+-- C-style "block" comments with a start and end delimiter are also easy to
+-- express:
+--
+-- local c_comment = token(l.COMMENT, '/*' * (l.any - '*/')^0 * P('*/')^-1)
+--
+-- This comment starts with a "/\*" sequence and contains anything up to and
+-- including an ending "\*/" sequence. The ending "\*/" is optional so the lexer
+-- can recognize unfinished comments as comments and highlight them properly.
+--
+-- **Strings**
+--
+-- It is tempting to think that a string is not much different from the block
+-- comment shown above in that both have start and end delimiters:
+--
+-- local dq_str = '"' * (l.any - '"')^0 * P('"')^-1
+-- local sq_str = "'" * (l.any - "'")^0 * P("'")^-1
+-- local simple_string = token(l.STRING, dq_str + sq_str)
+--
+-- However, most programming languages allow escape sequences in strings such
+-- that a sequence like "\\&quot;" in a double-quoted string indicates that the
+-- '&quot;' is not the end of the string. The above token incorrectly matches
+-- such a string. Instead, use the [`lexer.delimited_range()`]() convenience
+-- function.
+--
+-- local dq_str = l.delimited_range('"')
+-- local sq_str = l.delimited_range("'")
+-- local string = token(l.STRING, dq_str + sq_str)
+--
+-- In this case, the lexer treats '\' as an escape character in a string
+-- sequence.
+--
+-- **Numbers**
+--
+-- Most programming languages have the same format for integer and float tokens,
+-- so it might be as simple as using a couple of predefined LPeg patterns:
+--
+-- local number = token(l.NUMBER, l.float + l.integer)
+--
+-- However, some languages allow postfix characters on integers.
+--
+-- local integer = P('-')^-1 * (l.dec_num * S('lL')^-1)
+-- local number = token(l.NUMBER, l.float + l.hex_num + integer)
+--
+-- Your language may need other tweaks, but it is up to you how fine-grained you
+-- want your highlighting to be. After all, you are not writing a compiler or
+-- interpreter!
+--
+-- ### Rules
+--
+-- Programming languages have grammars, which specify valid token structure. For
+-- example, comments usually cannot appear within a string. Grammars consist of
+-- rules, which are simply combinations of tokens. Recall from the lexer
+-- template the [`lexer:add_rule()`]() call, which adds a rule to the lexer's
+-- grammar:
+--
+-- lexer:add_rule('whitespace', ws)
+--
+-- Each rule has an associated name, but rule names are completely arbitrary and
+-- serve only to identify and distinguish between different rules. Rule order is
+-- important: if text does not match the first rule added to the grammar, the
+-- lexer tries to match the second rule added, and so on. Right now this lexer
+-- simply matches whitespace tokens under a rule named "whitespace".
+--
+-- To illustrate the importance of rule order, here is an example of a
+-- simplified Lua lexer:
+--
+-- lexer:add_rule('whitespace', token(l.WHITESPACE, ...))
+-- lexer:add_rule('keyword', token(l.KEYWORD, ...))
+-- lexer:add_rule('identifier', token(l.IDENTIFIER, ...))
+-- lexer:add_rule('string', token(l.STRING, ...))
+-- lexer:add_rule('comment', token(l.COMMENT, ...))
+-- lexer:add_rule('number', token(l.NUMBER, ...))
+-- lexer:add_rule('label', token(l.LABEL, ...))
+-- lexer:add_rule('operator', token(l.OPERATOR, ...))
+--
+-- Note how identifiers come after keywords. In Lua, as with most programming
+-- languages, the characters allowed in keywords and identifiers are in the same
+-- set (alphanumerics plus underscores). If the lexer added the "identifier"
+-- rule before the "keyword" rule, all keywords would match identifiers and thus
+-- incorrectly highlight as identifiers instead of keywords. The same idea
+-- applies to function, constant, etc. tokens that you may want to distinguish
+-- between: their rules should come before identifiers.
+--
+-- So what about text that does not match any rules? For example in Lua, the '!'
+-- character is meaningless outside a string or comment. Normally the lexer
+-- skips over such text. If instead you want to highlight these "syntax errors",
+-- add an additional rule at the end of the grammar:
+--
+-- lexer:add_rule('whitespace', ws)
+-- ...
+-- lexer:add_rule('error', token(l.ERROR, l.any))
+--
+-- This identifies and highlights any character not matched by an existing
+-- rule as a `lexer.ERROR` token.
+--
+-- Even though the rules defined in the examples above contain a single token,
+-- rules may consist of multiple tokens. For example, a rule for an HTML tag
+-- could consist of a tag token followed by an arbitrary number of attribute
+-- tokens, allowing the lexer to highlight all tokens separately. That rule
+-- might look something like this:
+--
+-- lexer:add_rule('tag', tag_start * (ws * attributes)^0 * tag_end^-1)
+--
+-- Note however that lexers with complex rules like these are more prone to lose
+-- track of their state, especially if they span multiple lines.
+--
+-- ### Summary
+--
+-- Lexers primarily consist of tokens and grammar rules. At your disposal are a
+-- number of convenience patterns and functions for rapidly creating a lexer. If
+-- you choose to use predefined token names for your tokens, you do not have to
+-- define how the lexer highlights them. The tokens will inherit the default
+-- syntax highlighting color theme your editor uses.
+--
+-- ## Advanced Techniques
+--
+-- ### Styles and Styling
+--
+-- The most basic form of syntax highlighting is assigning different colors to
+-- different tokens. Instead of highlighting with just colors, Scintilla allows
+-- for more rich highlighting, or "styling", with different fonts, font sizes,
+-- font attributes, and foreground and background colors, just to name a few.
+-- The unit of this rich highlighting is called a "style". Styles are simply
+-- strings of comma-separated property settings. By default, lexers associate
+-- predefined token names like `lexer.WHITESPACE`, `lexer.COMMENT`,
+-- `lexer.STRING`, etc. with particular styles as part of a universal color
+-- theme. These predefined styles include [`lexer.STYLE_CLASS`](),
+-- [`lexer.STYLE_COMMENT`](), [`lexer.STYLE_CONSTANT`](),
+-- [`lexer.STYLE_ERROR`](), [`lexer.STYLE_EMBEDDED`](),
+-- [`lexer.STYLE_FUNCTION`](), [`lexer.STYLE_IDENTIFIER`](),
+-- [`lexer.STYLE_KEYWORD`](), [`lexer.STYLE_LABEL`](), [`lexer.STYLE_NUMBER`](),
+-- [`lexer.STYLE_OPERATOR`](), [`lexer.STYLE_PREPROCESSOR`](),
+-- [`lexer.STYLE_REGEX`](), [`lexer.STYLE_STRING`](), [`lexer.STYLE_TYPE`](),
+-- [`lexer.STYLE_VARIABLE`](), and [`lexer.STYLE_WHITESPACE`](). Like with
+-- predefined token names and LPeg patterns, you may define your own styles. At
+-- their core, styles are just strings, so you may create new ones and/or modify
+-- existing ones. Each style consists of the following comma-separated settings:
+--
+-- Setting | Description
+-- ---------------|------------
+-- font:_name_ | The name of the font the style uses.
+-- size:_int_ | The size of the font the style uses.
+-- [not]bold | Whether or not the font face is bold.
+-- weight:_int_ | The weight or boldness of a font, between 1 and 999.
+-- [not]italics | Whether or not the font face is italic.
+-- [not]underlined| Whether or not the font face is underlined.
+-- fore:_color_ | The foreground color of the font face.
+-- back:_color_ | The background color of the font face.
+-- [not]eolfilled | Whether or not the background color extends to the end of the line.
+-- case:_char_ | The case of the font ('u': upper, 'l': lower, 'm': normal).
+-- [not]visible | Whether or not the text is visible.
+-- [not]changeable| Whether the text is changeable or read-only.
+--
+-- Specify font colors in either "#RRGGBB" format, "0xBBGGRR" format, or the
+-- decimal equivalent of the latter. As with token names, LPeg patterns, and
+-- styles, there is a set of predefined color names, but they vary depending on
+-- the current color theme in use. Therefore, it is generally not a good idea to
+-- manually define colors within styles in your lexer since they might not fit
+-- into a user's chosen color theme. Try to refrain from even using predefined
+-- colors in a style because that color may be theme-specific. Instead, the best
+-- practice is to either use predefined styles or derive new color-agnostic
+-- styles from predefined ones. For example, Lua "longstring" tokens use the
+-- existing `lexer.STYLE_STRING` style instead of defining a new one.
+--
+-- #### Example Styles
+--
+-- Defining styles is pretty straightforward. An empty style that inherits the
+-- default theme settings is simply an empty string:
+--
+-- local style_nothing = ''
+--
+-- A similar style but with a bold font face looks like this:
+--
+-- local style_bold = 'bold'
+--
+-- If you want the same style, but also with an italic font face, define the new
+-- style in terms of the old one:
+--
+-- local style_bold_italic = style_bold..',italics'
+--
+-- This allows you to derive new styles from predefined ones without having to
+-- rewrite them. This operation leaves the old style unchanged. Thus if you
+-- had a "static variable" token whose style you wanted to base off of
+-- `lexer.STYLE_VARIABLE`, it would probably look like:
+--
+-- local style_static_var = l.STYLE_VARIABLE..',italics'
+--
+-- The color theme files in the *lexers/themes/* folder give more examples of
+-- style definitions.
+--
+-- ### Token Styles
+--
+-- Lexers use the [`lexer:add_style()`]() function to assign styles to
+-- particular tokens. Recall the token definition and rule from the lexer template:
+--
+-- local ws = token(l.WHITESPACE, l.space^1)
+-- lexer:add_rule('whitespace', ws)
+--
+-- Why is a style not assigned to the `lexer.WHITESPACE` token? As mentioned
+-- earlier, lexers automatically associate tokens that use predefined token
+-- names with a particular style. Only tokens with custom token names need
+-- manual style associations. As an example, consider a custom whitespace token:
+--
+-- local ws = token('custom_whitespace', l.space^1)
+--
+-- Assigning a style to this token looks like:
+--
+-- lexer:add_style('custom_whitespace', l.STYLE_WHITESPACE)
+--
+-- Do not confuse token names with rule names. They are completely different
+-- entities. In the example above, the lexer associates the "custom_whitespace"
+-- token with the existing style for `lexer.WHITESPACE` tokens. If instead you
+-- prefer to color the background of whitespace a shade of grey, it might look
+-- like:
+--
+-- local custom_style = l.STYLE_WHITESPACE..',back:$(color.grey)'
+-- lexer:add_style('custom_whitespace', custom_style)
+--
+-- Notice that the lexer performs Scintilla/SciTE-style "$()" property expansion.
+-- You may also use "%()". Remember to refrain from assigning specific colors in
+-- styles, but in this case, all user color themes probably define the
+-- "color.grey" property.
+--
+-- ### Line Lexers
+--
+-- By default, lexers match the arbitrary chunks of text passed to them by
+-- Scintilla. These chunks may be a full document, only the visible part of a
+-- document, or even just portions of lines. Some lexers need to match whole
+-- lines. For example, a lexer for the output of a file "diff" needs to know if
+-- the line started with a '+' or '-' and then style the entire line
+-- accordingly. To indicate that your lexer matches by line, create the lexer
+-- with an extra parameter:
+--
+-- local lexer = l.new('?', {lex_by_line = true})
+--
+-- Now the input text for the lexer is a single line at a time. Keep in mind
+-- that line lexers do not have the ability to look ahead at subsequent lines.
+--
+-- ### Embedded Lexers
+--
+-- Lexers embed within one another very easily, requiring minimal effort. In the
+-- following sections, the lexer being embedded is called the "child" lexer and
+-- the lexer a child is being embedded in is called the "parent". For example,
+-- consider an HTML lexer and a CSS lexer. Either lexer stands alone for styling
+-- their respective HTML and CSS files. However, CSS can be embedded inside
+-- HTML. In this specific case, the CSS lexer is the "child" lexer with the HTML
+-- lexer being the "parent". Now consider an HTML lexer and a PHP lexer. This
+-- sounds a lot like the case with CSS, but there is a subtle difference: PHP
+-- _embeds itself into_ HTML while CSS is _embedded in_ HTML. This fundamental
+-- difference results in two types of embedded lexers: a parent lexer that
+-- embeds other child lexers in it (like HTML embedding CSS), and a child lexer
+-- that embeds itself into a parent lexer (like PHP embedding itself in HTML).
+--
+-- #### Parent Lexer
+--
+-- Before embedding a child lexer into a parent lexer, the parent lexer needs to
+-- load the child lexer. This is done with the [`lexer.load()`]() function. For
+-- example, loading the CSS lexer within the HTML lexer looks like:
+--
+-- local css = l.load('css')
+--
+-- The next part of the embedding process is telling the parent lexer when to
+-- switch over to the child lexer and when to switch back. The lexer refers to
+-- these indications as the "start rule" and "end rule", respectively, and are
+-- just LPeg patterns. Continuing with the HTML/CSS example, the transition from
+-- HTML to CSS is when the lexer encounters a "style" tag with a "type"
+-- attribute whose value is "text/css":
+--
+-- local css_tag = P('<style') * P(function(input, index)
+-- if input:find('^[^>]+type="text/css"', index) then
+-- return index
+-- end
+-- end)
+--
+-- This pattern looks for the beginning of a "style" tag and searches its
+-- attribute list for the text "`type="text/css"`". (In this simplified example,
+-- the Lua pattern does not allow for whitespace around the '=' nor does it
+-- consider that using single quotes is valid.) If there is a match, the
+-- functional pattern returns a value instead of `nil`. In this case, the value
+-- returned does not matter because we ultimately want to style the "style" tag
+-- as an HTML tag, so the actual start rule looks like this:
+--
+-- local css_start_rule = #css_tag * tag
+--
+-- Now that the parent knows when to switch to the child, it needs to know when
+-- to switch back. In the case of HTML/CSS, the switch back occurs when the
+-- lexer encounters an ending "style" tag, though the lexer should still style
+-- the tag as an HTML tag:
+--
+-- local css_end_rule = #P('</style>') * tag
+--
+-- Once the parent loads the child lexer and defines the child's start and end
+-- rules, it embeds the child with the [`lexer:embed()`]() function:
+--
+-- lexer:embed(css, css_start_rule, css_end_rule)
+--
+-- #### Child Lexer
+--
+-- The process for instructing a child lexer to embed itself into a parent is
+-- very similar to embedding a child into a parent: first, load the parent lexer
+-- into the child lexer with the [`lexer.load()`]() function and then create
+-- start and end rules for the child lexer. However, in this case, call
+-- [`lexer:embed()`]() with switched arguments. For example, in the PHP lexer:
+--
+-- local html = l.load('html')
+-- local php_start_rule = token('php_tag', '<?php ')
+-- local php_end_rule = token('php_tag', '?>')
+-- lexer:add_style('php_tag', l.STYLE_EMBEDDED)
+-- html:embed(lexer, php_start_rule, php_end_rule)
+--
+-- ### Lexers with Complex State
+--
+-- A vast majority of lexers are not stateful and can operate on any chunk of
+-- text in a document. However, there may be rare cases where a lexer does need
+-- to keep track of some sort of persistent state. Rather than using `lpeg.P`
+-- function patterns that set state variables, it is recommended to make use of
+-- Scintilla's built-in, per-line state integers via [`lexer.line_state`](). It
+-- was designed to accommodate up to 32 bit flags for tracking state.
+-- [`lexer.line_from_position()`]() will return the line for any position given
+-- to an `lpeg.P` function pattern. (Any positions derived from that position
+-- argument will also work.)
+--
+-- Writing stateful lexers is beyond the scope of this document.
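+--
+-- That said, the general shape of the mechanism looks something like the
+-- following (a rough, illustrative sketch only, not a complete lexer):
+--
+--     local IN_HEREDOC = 1 -- an example flag kept in the line state
+--     local heredoc_start = P(function(input, index)
+--       local line = l.line_from_position(index)
+--       l.line_state[line] = IN_HEREDOC
+--       return index
+--     end)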
+--
+-- ## Code Folding
+--
+-- When reading source code, it is occasionally helpful to temporarily hide
+-- blocks of code like functions, classes, comments, etc. This is the concept of
+-- "folding". In the Textadept and SciTE editors for example, little indicators
+-- in the editor margins appear next to code that can be folded at places called
+-- "fold points". When the user clicks an indicator, the editor hides the code
+-- associated with the indicator until the user clicks the indicator again. The
+-- lexer specifies these fold points and what code exactly to fold.
+--
+-- The fold points for most languages occur on keywords or character sequences.
+-- Examples of fold keywords are "if" and "end" in Lua and examples of fold
+-- character sequences are '{', '}', "/\*", and "\*/" in C for code block and
+-- comment delimiters, respectively. However, these fold points cannot occur
+-- just anywhere. For example, lexers should not recognize fold keywords that
+-- appear within strings or comments. The [`lexer:add_fold_point()`]() function
+-- allows you to conveniently define fold points with such granularity. For
+-- example, consider C:
+--
+-- lexer:add_fold_point(l.OPERATOR, '{', '}')
+-- lexer:add_fold_point(l.COMMENT, '/*', '*/')
+--
+-- The first call states that any '{' or '}' that the lexer recognizes as
+-- an `lexer.OPERATOR` token is a fold point. Likewise, the second call
+-- states that any "/\*" or "\*/" that the lexer recognizes as part of a
+-- `lexer.COMMENT` token is a fold point. The lexer does not consider any
+-- occurrences of these characters outside their defined tokens (such as in a
+-- string) as fold points. How do you specify fold keywords? Here is an example
+-- for Lua:
+--
+-- lexer:add_fold_point(l.KEYWORD, 'if', 'end')
+-- lexer:add_fold_point(l.KEYWORD, 'do', 'end')
+-- lexer:add_fold_point(l.KEYWORD, 'function', 'end')
+-- lexer:add_fold_point(l.KEYWORD, 'repeat', 'until')
+--
+-- If your lexer has case-insensitive keywords as fold points, simply add a
+-- `case_insensitive_fold_points = true` option to [`lexer.new()`](), and
+-- specify keywords in lower case.
+--
+-- If your lexer needs to do some additional processing in order to determine if
+-- a token is a fold point, pass a function that returns an integer to
+-- `lexer:add_fold_point()`. Returning `1` indicates the token is a beginning
+-- fold point and returning `-1` indicates the token is an ending fold point.
+-- Returning `0` indicates the token is not a fold point. For example:
+--
+-- local function fold_strange_token(text, pos, line, s, symbol)
+-- if ... then
+-- return 1 -- beginning fold point
+-- elseif ... then
+-- return -1 -- ending fold point
+-- end
+-- return 0
+-- end
+--
+-- lexer:add_fold_point('strange_token', '|', fold_strange_token)
+--
+-- Any time the lexer encounters a '|' that is a "strange_token", it calls the
+-- `fold_strange_token` function to determine if '|' is a fold point. The lexer
+-- calls these functions with the following arguments: the text to identify fold
+-- points in, the beginning position of the current line in the text to fold,
+-- the current line's text, the position in the current line the fold point text
+-- starts at, and the fold point text itself.
+--
+-- ### Fold by Indentation
+--
+-- Some languages have significant whitespace and/or no delimiters that indicate
+-- fold points. If your lexer falls into this category and you would like to
+-- mark fold points based on changes in indentation, create the lexer with a
+-- `fold_by_indentation = true` option:
+--
+-- local lexer = l.new('?', {fold_by_indentation = true})
+--
+-- ## Using Lexers
+--
+-- ### Textadept
+--
+-- Put your lexer in your *~/.textadept/lexers/* directory so you do not
+-- overwrite it when upgrading Textadept. Also, lexers in this directory
+-- override default lexers. Thus, Textadept loads a user *lua* lexer instead of
+-- the default *lua* lexer. This is convenient for tweaking a default lexer to
+-- your liking. Then add a [file type][] for your lexer if necessary.
+--
+-- [file type]: textadept.file_types.html
+--
+-- ### SciTE
+--
+-- Create a *.properties* file for your lexer and `import` it in either your
+-- *SciTEUser.properties* or *SciTEGlobal.properties*. The contents of the
+-- *.properties* file should contain:
+--
+-- file.patterns.[lexer_name]=[file_patterns]
+-- lexer.$(file.patterns.[lexer_name])=[lexer_name]
+--
+-- where `[lexer_name]` is the name of your lexer (minus the *.lua* extension)
+-- and `[file_patterns]` is a set of file extensions to use your lexer for.
+--
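+-- For example, for a hypothetical "myproj" lexer handling *.mp files, the
+-- *.properties* file might contain:
+--
+--     file.patterns.myproj=*.mp
+--     lexer.$(file.patterns.myproj)=myproj
+--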
+-- Please note that Lua lexers ignore any styling information in *.properties*
+-- files. Your theme file in the *lexers/themes/* directory contains styling
+-- information.
+--
+-- ## Considerations
+--
+-- ### Performance
+--
+-- There might be some slight overhead when initializing a lexer, but loading a
+-- file from disk into Scintilla is usually more expensive. On modern computer
+-- systems, I see no difference in speed between LPeg lexers and Scintilla's C++
+-- ones. Optimize lexers for speed by re-arranging `lexer:add_rule()` calls so
+-- that the most common rules match first. Do keep in mind that order matters
+-- for similar rules.
+--
+-- In some cases, folding may be far more expensive than lexing, particularly
+-- in lexers with a lot of potential fold points. If your lexer is exhibiting
+-- signs of slowness, try disabling folding in your text editor first. If that
+-- speeds things up, you can try reducing the number of fold points you added,
+-- overriding `lexer:fold()` with your own implementation, or simply eliminating
+-- folding support from your lexer.
+--
+-- ### Limitations
+--
+-- Embedded preprocessor languages like PHP cannot completely embed in their
+-- parent languages in that the parent's tokens do not support start and end
+-- rules. This mostly goes unnoticed, but code like
+--
+-- <div id="<?php echo $id; ?>">
+--
+-- will not style correctly.
+--
+-- ### Troubleshooting
+--
+-- Errors in lexers can be tricky to debug. Lexers print Lua errors to
+-- `io.stderr` and `_G.print()` statements to `io.stdout`. Running your editor
+-- from a terminal is the easiest way to see errors as they occur.
+--
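+-- A simple way to see how far a grammar gets (an illustrative trick, not part
+-- of the API) is to drop a printing function pattern in front of a suspect
+-- rule:
+--
+--     local trace = P(function(input, index)
+--       print('reached position', index)
+--       return index
+--     end)
+--     -- Multiply it in front of a suspect pattern, e.g. `trace * sq_str`.
+--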
+-- ### Risks
+--
+-- Poorly written lexers have the ability to crash Scintilla (and thus its
+-- containing application), so unsaved data might be lost. However, I have only
+-- observed these crashes in early lexer development, when syntax errors or
+-- pattern errors are present. Once the lexer actually starts styling text
+-- (either correctly or incorrectly, it does not matter), I have not observed
+-- any crashes.
+--
+-- ### Acknowledgements
+--
+-- Thanks to Peter Odding for his [lexer post][] on the Lua mailing list
+-- that inspired me, and thanks to Roberto Ierusalimschy for LPeg.
+--
+-- [lexer post]: http://lua-users.org/lists/lua-l/2007-04/msg00116.html
+-- @field path (string)
+-- The path used to search for a lexer to load.
+-- Identical in format to Lua's `package.path` string.
+-- The default value is `package.path`.
+-- @field DEFAULT (string)
+-- The token name for default tokens.
+-- @field WHITESPACE (string)
+-- The token name for whitespace tokens.
+-- @field COMMENT (string)
+-- The token name for comment tokens.
+-- @field STRING (string)
+-- The token name for string tokens.
+-- @field NUMBER (string)
+-- The token name for number tokens.
+-- @field KEYWORD (string)
+-- The token name for keyword tokens.
+-- @field IDENTIFIER (string)
+-- The token name for identifier tokens.
+-- @field OPERATOR (string)
+-- The token name for operator tokens.
+-- @field ERROR (string)
+-- The token name for error tokens.
+-- @field PREPROCESSOR (string)
+-- The token name for preprocessor tokens.
+-- @field CONSTANT (string)
+-- The token name for constant tokens.
+-- @field VARIABLE (string)
+-- The token name for variable tokens.
+-- @field FUNCTION (string)
+-- The token name for function tokens.
+-- @field CLASS (string)
+-- The token name for class tokens.
+-- @field TYPE (string)
+-- The token name for type tokens.
+-- @field LABEL (string)
+-- The token name for label tokens.
+-- @field REGEX (string)
+-- The token name for regex tokens.
+-- @field STYLE_CLASS (string)
+-- The style typically used for class definitions.
+-- @field STYLE_COMMENT (string)
+-- The style typically used for code comments.
+-- @field STYLE_CONSTANT (string)
+-- The style typically used for constants.
+-- @field STYLE_ERROR (string)
+-- The style typically used for erroneous syntax.
+-- @field STYLE_FUNCTION (string)
+-- The style typically used for function definitions.
+-- @field STYLE_KEYWORD (string)
+-- The style typically used for language keywords.
+-- @field STYLE_LABEL (string)
+-- The style typically used for labels.
+-- @field STYLE_NUMBER (string)
+-- The style typically used for numbers.
+-- @field STYLE_OPERATOR (string)
+-- The style typically used for operators.
+-- @field STYLE_REGEX (string)
+-- The style typically used for regular expression strings.
+-- @field STYLE_STRING (string)
+-- The style typically used for strings.
+-- @field STYLE_PREPROCESSOR (string)
+-- The style typically used for preprocessor statements.
+-- @field STYLE_TYPE (string)
+-- The style typically used for static types.
+-- @field STYLE_VARIABLE (string)
+-- The style typically used for variables.
+-- @field STYLE_WHITESPACE (string)
+-- The style typically used for whitespace.
+-- @field STYLE_EMBEDDED (string)
+-- The style typically used for embedded code.
+-- @field STYLE_IDENTIFIER (string)
+-- The style typically used for identifier words.
+-- @field STYLE_DEFAULT (string)
+-- The style all styles are based off of.
+-- @field STYLE_LINENUMBER (string)
+-- The style used for all margins except fold margins.
+-- @field STYLE_BRACELIGHT (string)
+-- The style used for highlighted brace characters.
+-- @field STYLE_BRACEBAD (string)
+-- The style used for unmatched brace characters.
+-- @field STYLE_CONTROLCHAR (string)
+-- The style used for control characters.
+-- Color attributes are ignored.
+-- @field STYLE_INDENTGUIDE (string)
+-- The style used for indentation guides.
+-- @field STYLE_CALLTIP (string)
+-- The style used by call tips if [`buffer.call_tip_use_style`]() is set.
+-- Only the font name, size, and color attributes are used.
+-- @field STYLE_FOLDDISPLAYTEXT (string)
+-- The style used for fold display text.
+-- @field any (pattern)
+-- A pattern that matches any single character.
+-- @field ascii (pattern)
+-- A pattern that matches any ASCII character (codes 0 to 127).
+-- @field extend (pattern)
+-- A pattern that matches any ASCII extended character (codes 0 to 255).
+-- @field alpha (pattern)
+-- A pattern that matches any alphabetic character ('A'-'Z', 'a'-'z').
+-- @field digit (pattern)
+-- A pattern that matches any digit ('0'-'9').
+-- @field alnum (pattern)
+-- A pattern that matches any alphanumeric character ('A'-'Z', 'a'-'z',
+-- '0'-'9').
+-- @field lower (pattern)
+-- A pattern that matches any lower case character ('a'-'z').
+-- @field upper (pattern)
+-- A pattern that matches any upper case character ('A'-'Z').
+-- @field xdigit (pattern)
+-- A pattern that matches any hexadecimal digit ('0'-'9', 'A'-'F', 'a'-'f').
+-- @field cntrl (pattern)
+-- A pattern that matches any control character (ASCII codes 0 to 31).
+-- @field graph (pattern)
+-- A pattern that matches any graphical character ('!' to '~').
+-- @field print (pattern)
+-- A pattern that matches any printable character (' ' to '~').
+-- @field punct (pattern)
+-- A pattern that matches any punctuation character ('!' to '/', ':' to '@',
+-- '[' to ''', '{' to '~').
+-- @field space (pattern)
+-- A pattern that matches any whitespace character ('\t', '\v', '\f', '\n',
+-- '\r', space).
+-- @field newline (pattern)
+-- A pattern that matches any set of end of line characters.
+-- @field nonnewline (pattern)
+-- A pattern that matches any single, non-newline character.
+-- @field nonnewline_esc (pattern)
+-- A pattern that matches any single, non-newline character or any set of end
+-- of line characters escaped with '\'.
+-- @field dec_num (pattern)
+-- A pattern that matches a decimal number.
+-- @field hex_num (pattern)
+-- A pattern that matches a hexadecimal number.
+-- @field oct_num (pattern)
+-- A pattern that matches an octal number.
+-- @field integer (pattern)
+-- A pattern that matches either a decimal, hexadecimal, or octal number.
+-- @field float (pattern)
+-- A pattern that matches a floating point number.
+-- @field word (pattern)
+-- A pattern that matches a typical word. Words begin with a letter or
+-- underscore and consist of alphanumeric and underscore characters.
+-- @field FOLD_BASE (number)
+-- The initial (root) fold level.
+-- @field FOLD_BLANK (number)
+-- Flag indicating that the line is blank.
+-- @field FOLD_HEADER (number)
+-- Flag indicating that the line is a fold point.
+-- @field fold_level (table, Read-only)
+-- Table of fold level bit-masks for line numbers starting from zero.
+-- Fold level masks are composed of an integer level combined with any of the
+-- following bits:
+--
+-- * `lexer.FOLD_BASE`
+-- The initial fold level.
+-- * `lexer.FOLD_BLANK`
+-- The line is blank.
+-- * `lexer.FOLD_HEADER`
+-- The line is a header, or fold point.
+-- @field indent_amount (table, Read-only)
+-- Table of indentation amounts in character columns, for line numbers
+-- starting from zero.
+-- @field line_state (table)
+-- Table of integer line states for line numbers starting from zero.
+-- Line states can be used by lexers for keeping track of persistent states.
+-- @field property (table)
+-- Map of key-value string pairs.
+-- @field property_expanded (table, Read-only)
+-- Map of key-value string pairs with `$()` and `%()` variable replacement
+-- performed in values.
+-- @field property_int (table, Read-only)
+-- Map of key-value pairs with values interpreted as numbers, or `0` if not
+-- found.
+-- @field style_at (table, Read-only)
+-- Table of style names at positions in the buffer starting from 1.
+module('lexer')]=]
+
+local lpeg = require('lpeg')
+local lpeg_P, lpeg_R, lpeg_S, lpeg_V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+local lpeg_Ct, lpeg_Cc, lpeg_Cp = lpeg.Ct, lpeg.Cc, lpeg.Cp
+local lpeg_Cmt, lpeg_C = lpeg.Cmt, lpeg.C
+local lpeg_match = lpeg.match
+
+M.path = package.path
+
+if not package.searchpath then
+ -- Searches for the given *name* in the given *path*.
+ -- This is an implementation of Lua 5.2's `package.searchpath()` function for
+ -- Lua 5.1.
+ function package.searchpath(name, path)
+ local tried = {}
+ for part in path:gmatch('[^;]+') do
+ local filename = part:gsub('%?', name)
+ local f = io.open(filename, 'r')
+ if f then
+ f:close()
+ return filename
+ end
+ tried[#tried + 1] = string.format("no file '%s'", filename)
+ end
+ return nil, table.concat(tried, '\n')
+ end
+end
+
+local string_upper = string.upper
+-- Default styles.
+local default = {
+ 'nothing', 'whitespace', 'comment', 'string', 'number', 'keyword',
+ 'identifier', 'operator', 'error', 'preprocessor', 'constant', 'variable',
+ 'function', 'class', 'type', 'label', 'regex', 'embedded'
+}
+for i = 1, #default do
+ local name, upper_name = default[i], string_upper(default[i])
+ M[upper_name], M['STYLE_'..upper_name] = name, '$(style.'..name..')'
+end
+-- Predefined styles.
+local predefined = {
+ 'default', 'linenumber', 'bracelight', 'bracebad', 'controlchar',
+ 'indentguide', 'calltip', 'folddisplaytext'
+}
+for i = 1, #predefined do
+ local name, upper_name = predefined[i], string_upper(predefined[i])
+ M[upper_name], M['STYLE_'..upper_name] = name, '$(style.'..name..')'
+end
+
+---
+-- Adds pattern *rule* identified by string *id* to the ordered list of rules
+-- for lexer *lexer*.
+-- @param lexer The lexer to add the given rule to.
+-- @param id The id associated with this rule. It does not have to be the same
+-- as the name passed to `token()`.
+-- @param rule The LPeg pattern of the rule.
+-- @see modify_rule
+-- @name add_rule
+function M.add_rule(lexer, id, rule)
+ if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent
+ if not lexer._RULES then
+ lexer._RULES = {}
+ -- Contains an ordered list (by numerical index) of rule names. This is used
+ -- in conjunction with lexer._RULES for building _TOKENRULE.
+ lexer._RULEORDER = {}
+ end
+ lexer._RULES[id] = rule
+ lexer._RULEORDER[#lexer._RULEORDER + 1] = id
+ lexer:build_grammar()
+end
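+
+-- Illustrative sketch (not part of this module; the 'demo' lexer and its rules
+-- are made up): rules are tried in the order they were added, so add more
+-- specific rules (e.g. keywords) before general ones (e.g. identifiers).
+--
+--   local l = require('lexer')
+--   local lex = l.new('demo')
+--   lex:add_rule('whitespace', l.token(l.WHITESPACE, l.space^1))
+--   lex:add_rule('keyword', l.token(l.KEYWORD, l.word_match[[if else end]]))
+--   lex:add_rule('identifier', l.token(l.IDENTIFIER, l.word))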
+
+---
+-- Replaces in lexer *lexer* the existing rule identified by string *id* with
+-- pattern *rule*.
+-- @param lexer The lexer to modify.
+-- @param id The id associated with this rule.
+-- @param rule The LPeg pattern of the rule.
+-- @name modify_rule
+function M.modify_rule(lexer, id, rule)
+ if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent
+ lexer._RULES[id] = rule
+ lexer:build_grammar()
+end
+
+---
+-- Associates string *token_name* in lexer *lexer* with Scintilla style string
+-- *style*.
+-- Style strings are comma-separated property settings. Available property
+-- settings are:
+--
+-- * `font:name`: Font name.
+-- * `size:int`: Font size.
+-- * `bold` or `notbold`: Whether or not the font face is bold.
+-- * `weight:int`: Font weight (between 1 and 999).
+-- * `italics` or `notitalics`: Whether or not the font face is italic.
+-- * `underlined` or `notunderlined`: Whether or not the font face is
+-- underlined.
+-- * `fore:color`: Font face foreground color in "#RRGGBB" or 0xBBGGRR format.
+-- * `back:color`: Font face background color in "#RRGGBB" or 0xBBGGRR format.
+-- * `eolfilled` or `noteolfilled`: Whether or not the background color
+-- extends to the end of the line.
+-- * `case:char`: Font case ('u' for uppercase, 'l' for lowercase, and 'm' for
+-- mixed case).
+-- * `visible` or `notvisible`: Whether or not the text is visible.
+-- * `changeable` or `notchangeable`: Whether or not the text is changeable or
+-- read-only.
+--
+-- Property settings may also contain "$(property.name)" expansions for
+-- properties defined in Scintilla, theme files, etc.
+-- @param lexer The lexer to add a style to.
+-- @param token_name The name of the token to associate with the style.
+-- @param style A style string for Scintilla.
+-- @usage lexer:add_style('longstring', l.STYLE_STRING)
+-- @usage lexer:add_style('deprecated_function', l.STYLE_FUNCTION..',italics')
+-- @usage lexer:add_style('visible_ws',
+-- l.STYLE_WHITESPACE..',back:$(color.grey)')
+-- @name add_style
+function M.add_style(lexer, token_name, style)
+ local num_styles = lexer._numstyles
+ if num_styles == 32 then num_styles = num_styles + 8 end -- skip predefined
+ if num_styles >= 255 then print('Too many styles defined (255 MAX)') end
+ lexer._TOKENSTYLES[token_name], lexer._numstyles = num_styles, num_styles + 1
+ lexer._EXTRASTYLES[token_name] = style
+ -- If the lexer is a proxy or a child that embedded itself, copy this style to
+ -- the parent lexer.
+ if lexer._lexer then lexer._lexer:add_style(token_name, style) end
+end
+
+---
+-- Adds to lexer *lexer* a fold point whose beginning and end tokens are string
+-- *token_name* tokens with string content *start_symbol* and *end_symbol*,
+-- respectively.
+-- In the event that *start_symbol* may or may not be a fold point depending on
+-- context, and additional processing is required, *end_symbol* may be a
+-- function that ultimately returns `1` (indicating a beginning fold point),
+-- `-1` (indicating an ending fold point), or `0` (indicating no fold point).
+-- That function is passed the following arguments:
+--
+-- * `text`: The text being processed for fold points.
+-- * `pos`: The position in *text* of the beginning of the line currently
+-- being processed.
+-- * `line`: The text of the line currently being processed.
+-- * `s`: The position of *start_symbol* in *line*.
+-- * `symbol`: *start_symbol* itself.
+-- @param lexer The lexer to add a fold point to.
+-- @param token_name The token name of text that indicates a fold point.
+-- @param start_symbol The text that indicates the beginning of a fold point.
+-- @param end_symbol Either the text that indicates the end of a fold point, or
+-- a function that returns whether or not *start_symbol* is a beginning fold
+-- point (1), an ending fold point (-1), or not a fold point at all (0).
+-- @usage lexer:add_fold_point(l.OPERATOR, '{', '}')
+-- @usage lexer:add_fold_point(l.KEYWORD, 'if', 'end')
+-- @usage lexer:add_fold_point(l.COMMENT, '#', l.fold_line_comments('#'))
+-- @usage lexer:add_fold_point('custom', function(text, pos, line, s, symbol)
+-- ... end)
+-- @name add_fold_point
+function M.add_fold_point(lexer, token_name, start_symbol, end_symbol)
+ if not lexer._FOLDPOINTS then lexer._FOLDPOINTS = {_SYMBOLS = {}} end
+ local symbols = lexer._FOLDPOINTS._SYMBOLS
+ if not symbols[start_symbol] then
+ symbols[#symbols + 1], symbols[start_symbol] = start_symbol, true
+ end
+ if not lexer._FOLDPOINTS[token_name] then
+ lexer._FOLDPOINTS[token_name] = {}
+ end
+ if type(end_symbol) == 'string' then
+ if not symbols[end_symbol] then
+ symbols[#symbols + 1], symbols[end_symbol] = end_symbol, true
+ end
+ lexer._FOLDPOINTS[token_name][start_symbol] = 1
+ lexer._FOLDPOINTS[token_name][end_symbol] = -1
+ else
+ lexer._FOLDPOINTS[token_name][start_symbol] = end_symbol -- function or int
+ end
+ -- If the lexer is a proxy or a child that embedded itself, copy this fold
+ -- point to the parent lexer.
+ if lexer._lexer then
+ lexer._lexer:add_fold_point(token_name, start_symbol, end_symbol)
+ end
+end
+
+-- (Re)constructs `lexer._TOKENRULE`.
+local function join_tokens(lexer)
+ local patterns, order = lexer._RULES, lexer._RULEORDER
+ local token_rule = patterns[order[1]]
+ for i = 2, #order do token_rule = token_rule + patterns[order[i]] end
+ lexer._TOKENRULE = token_rule + M.token(M.DEFAULT, M.any)
+ return lexer._TOKENRULE
+end
+
+-- Metatable for Scintillua grammars.
+-- These grammars are just tables ultimately passed to `lpeg.P()`.
+local grammar_mt = {__index = {
+ -- Adds lexer *lexer* and any of its embedded lexers to this grammar.
+ -- @param lexer The lexer to add.
+ add_lexer = function(self, lexer)
+ local token_rule = lexer:join_tokens()
+ for i = 1, #lexer._CHILDREN do
+ local child = lexer._CHILDREN[i]
+ if child._CHILDREN then self:add_lexer(child) end
+ local rules = child._EMBEDDEDRULES[lexer._NAME]
+ local rules_token_rule = self['__'..child._NAME] or rules.token_rule
+ self[child._NAME] = (-rules.end_rule * rules_token_rule)^0 *
+ rules.end_rule^-1 * lpeg_V(lexer._NAME)
+ local embedded_child = '_'..child._NAME
+ self[embedded_child] = rules.start_rule *
+ (-rules.end_rule * rules_token_rule)^0 *
+ rules.end_rule^-1
+ token_rule = lpeg_V(embedded_child) + token_rule
+ end
+ self['__'..lexer._NAME] = token_rule -- can contain embedded lexer rules
+ self[lexer._NAME] = token_rule^0
+ end
+}}
+
+-- (Re)constructs `lexer._GRAMMAR`.
+-- @param initial_rule The name of the rule to start lexing with. The default
+-- value is `lexer._NAME`. Multilang lexers use this to start with a child
+-- rule if necessary.
+local function build_grammar(lexer, initial_rule)
+ if not lexer._RULES then return end
+ if lexer._CHILDREN then
+ if not initial_rule then initial_rule = lexer._NAME end
+ local grammar = setmetatable({initial_rule}, grammar_mt)
+ grammar:add_lexer(lexer)
+ lexer._INITIALRULE = initial_rule
+ lexer._GRAMMAR = lpeg_Ct(lpeg_P(grammar))
+ else
+ lexer._GRAMMAR = lpeg_Ct(lexer:join_tokens()^0)
+ end
+end
+
+---
+-- Embeds child lexer *child* in parent lexer *lexer* using patterns
+-- *start_rule* and *end_rule*, which signal the beginning and end of the
+-- embedded lexer, respectively.
+-- @param lexer The parent lexer.
+-- @param child The child lexer.
+-- @param start_rule The pattern that signals the beginning of the embedded
+-- lexer.
+-- @param end_rule The pattern that signals the end of the embedded lexer.
+-- @usage html:embed(css, css_start_rule, css_end_rule)
+-- @usage html:embed(lexer, php_start_rule, php_end_rule) -- from php lexer
+-- @name embed
+function M.embed(lexer, child, start_rule, end_rule)
+ if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent
+ -- Add child rules.
+ if not child._EMBEDDEDRULES then child._EMBEDDEDRULES = {} end
+ if not child._RULES then error('Cannot embed lexer with no rules') end
+ child._EMBEDDEDRULES[lexer._NAME] = {
+ ['start_rule'] = start_rule,
+ token_rule = child:join_tokens(),
+ ['end_rule'] = end_rule
+ }
+ if not lexer._CHILDREN then lexer._CHILDREN = {} end
+ local children = lexer._CHILDREN
+ children[#children + 1] = child
+ -- Add child styles.
+ for token, style in pairs(child._EXTRASTYLES) do
+ lexer:add_style(token, style)
+ end
+ -- Add child fold symbols.
+ if child._FOLDPOINTS then
+ for token_name, symbols in pairs(child._FOLDPOINTS) do
+ if token_name ~= '_SYMBOLS' then
+ for symbol, v in pairs(symbols) do
+ lexer:add_fold_point(token_name, symbol, v)
+ end
+ end
+ end
+ end
+ lexer:build_grammar()
+ child._lexer = lexer -- use parent's tokens if child is embedding itself
+end
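+
+-- For concrete uses of `embed()`, see `lexlua/markdown.lua` (which embeds the
+-- HTML lexer) and `lexlua/makefile.lua` (which embeds the bash lexer); both
+-- use start and end rules that are themselves tokens.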
+
+---
+-- Lexes a chunk of text *text* (that has an initial style number of
+-- *init_style*) using lexer *lexer*, returning a table of token names and
+-- positions.
+-- @param lexer The lexer to lex text with.
+-- @param text The text in the buffer to lex.
+-- @param init_style The current style. Multiple-language lexers use this to
+-- determine which language to start lexing in.
+-- @return table of token names and positions.
+-- @name lex
+function M.lex(lexer, text, init_style)
+ if not lexer._GRAMMAR then return {M.DEFAULT, #text + 1} end
+ if not lexer._LEXBYLINE then
+ -- For multilang lexers, build a new grammar whose initial_rule is the
+ -- current language.
+ if lexer._CHILDREN then
+ for style, style_num in pairs(lexer._TOKENSTYLES) do
+ if style_num == init_style then
+ local lexer_name = style:match('^(.+)_whitespace') or lexer._NAME
+ if lexer._INITIALRULE ~= lexer_name then
+ lexer:build_grammar(lexer_name)
+ end
+ break
+ end
+ end
+ end
+ return lpeg_match(lexer._GRAMMAR, text)
+ else
+ local tokens = {}
+ local function append(tokens, line_tokens, offset)
+ for i = 1, #line_tokens, 2 do
+ tokens[#tokens + 1] = line_tokens[i]
+ tokens[#tokens + 1] = line_tokens[i + 1] + offset
+ end
+ end
+ local offset = 0
+ local grammar = lexer._GRAMMAR
+ for line in text:gmatch('[^\r\n]*\r?\n?') do
+ local line_tokens = lpeg_match(grammar, line)
+ if line_tokens then append(tokens, line_tokens, offset) end
+ offset = offset + #line
+ -- Use the default style to the end of the line if none was specified.
+ if tokens[#tokens] ~= offset then
+ tokens[#tokens + 1], tokens[#tokens + 2] = 'default', offset + 1
+ end
+ end
+ return tokens
+ end
+end
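+
+-- Stand-alone usage sketch (assumes the lexlua directory is on `lexer.path` so
+-- `load()` can find 'lua.lua'): the returned table is a flat list of
+-- alternating token names and the positions just past each token.
+--
+--   local lexer = require('lexer')
+--   local lua = lexer.load('lua')
+--   local tokens = lua:lex('print("hi")')
+--   for i = 1, #tokens, 2 do
+--     print(tokens[i], tokens[i + 1]) -- e.g. 'function' 6 for 'print'
+--   end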
+
+---
+-- Determines fold points in a chunk of text *text* using lexer *lexer*,
+-- returning a table of fold levels associated with line numbers.
+-- *text* starts at position *start_pos* on line number *start_line* with a
+-- beginning fold level of *start_level* in the buffer.
+-- @param lexer The lexer to fold text with.
+-- @param text The text in the buffer to fold.
+-- @param start_pos The position in the buffer *text* starts at, starting at
+-- zero.
+-- @param start_line The line number *text* starts on.
+-- @param start_level The fold level *text* starts on.
+-- @return table of fold levels associated with line numbers.
+-- @name fold
+function M.fold(lexer, text, start_pos, start_line, start_level)
+ local folds = {}
+ if text == '' then return folds end
+ local fold = M.property_int['fold'] > 0
+ local FOLD_BASE = M.FOLD_BASE
+ local FOLD_HEADER, FOLD_BLANK = M.FOLD_HEADER, M.FOLD_BLANK
+ if fold and lexer._FOLDPOINTS then
+ local lines = {}
+ for p, l in (text..'\n'):gmatch('()(.-)\r?\n') do
+ lines[#lines + 1] = {p, l}
+ end
+ local fold_zero_sum_lines = M.property_int['fold.on.zero.sum.lines'] > 0
+ local fold_points = lexer._FOLDPOINTS
+ local fold_point_symbols = fold_points._SYMBOLS
+ local style_at, fold_level = M.style_at, M.fold_level
+ local line_num, prev_level = start_line, start_level
+ local current_level = prev_level
+ for i = 1, #lines do
+ local pos, line = lines[i][1], lines[i][2]
+ if line ~= '' then
+ if lexer._CASEINSENSITIVEFOLDPOINTS then line = line:lower() end
+ local level_decreased = false
+ for j = 1, #fold_point_symbols do
+ local symbol = fold_point_symbols[j]
+ local word = not symbol:find('[^%w_]')
+ local s, e = line:find(symbol, 1, true)
+ while s and e do
+ --if not word or line:find('^%f[%w_]'..symbol..'%f[^%w_]', s) then
+ if not word or not ((s > 1 and line:find('^[%w_]', s - 1)) or
+ line:find('^[%w_]', e + 1)) then
+ local symbols = fold_points[style_at[start_pos + pos + s - 1]]
+ local level = symbols and symbols[symbol]
+ if type(level) == 'function' then
+ level = level(text, pos, line, s, symbol)
+ end
+ if type(level) == 'number' then
+ current_level = current_level + level
+ if level < 0 and current_level < prev_level then
+ -- Potential zero-sum line. If the level were to go back up on
+ -- the same line, the line may be marked as a fold header.
+ level_decreased = true
+ end
+ end
+ end
+ s = line:find(fold_point_symbols[j], s + 1, true)
+ end
+ end
+ folds[line_num] = prev_level
+ if current_level > prev_level then
+ folds[line_num] = prev_level + FOLD_HEADER
+ elseif level_decreased and current_level == prev_level and
+ fold_zero_sum_lines then
+ if line_num > start_line then
+ folds[line_num] = prev_level - 1 + FOLD_HEADER
+ else
+ -- Typing within a zero-sum line.
+ local level = fold_level[line_num - 1] - 1
+ if level > FOLD_HEADER then level = level - FOLD_HEADER end
+ if level > FOLD_BLANK then level = level - FOLD_BLANK end
+ folds[line_num] = level + FOLD_HEADER
+ current_level = current_level + 1
+ end
+ end
+ if current_level < FOLD_BASE then current_level = FOLD_BASE end
+ prev_level = current_level
+ else
+ folds[line_num] = prev_level + FOLD_BLANK
+ end
+ line_num = line_num + 1
+ end
+ elseif fold and (lexer._FOLDBYINDENTATION or
+ M.property_int['fold.by.indentation'] > 0) then
+ -- Indentation based folding.
+ -- Calculate indentation per line.
+ local indentation = {}
+ for indent, line in (text..'\n'):gmatch('([\t ]*)([^\r\n]*)\r?\n') do
+ indentation[#indentation + 1] = line ~= '' and #indent
+ end
+ -- Find the first non-blank line before start_line. If the current line is
+ -- indented, make that previous line a header and update the levels of any
+  -- blank lines in between. If the current line is blank, match the level of
+ -- the previous non-blank line.
+ local current_level = start_level
+ for i = start_line - 1, 0, -1 do
+ local level = M.fold_level[i]
+ if level >= FOLD_HEADER then level = level - FOLD_HEADER end
+ if level < FOLD_BLANK then
+ local indent = M.indent_amount[i]
+ if indentation[1] and indentation[1] > indent then
+ folds[i] = FOLD_BASE + indent + FOLD_HEADER
+ for j = i + 1, start_line - 1 do
+ folds[j] = start_level + FOLD_BLANK
+ end
+ elseif not indentation[1] then
+ current_level = FOLD_BASE + indent
+ end
+ break
+ end
+ end
+ -- Iterate over lines, setting fold numbers and fold flags.
+ for i = 1, #indentation do
+ if indentation[i] then
+ current_level = FOLD_BASE + indentation[i]
+ folds[start_line + i - 1] = current_level
+ for j = i + 1, #indentation do
+ if indentation[j] then
+ if FOLD_BASE + indentation[j] > current_level then
+ folds[start_line + i - 1] = current_level + FOLD_HEADER
+ current_level = FOLD_BASE + indentation[j] -- for any blanks below
+ end
+ break
+ end
+ end
+ else
+ folds[start_line + i - 1] = current_level + FOLD_BLANK
+ end
+ end
+ else
+ -- No folding, reset fold levels if necessary.
+ local current_line = start_line
+ for _ in text:gmatch('\r?\n') do
+ folds[current_line] = start_level
+ current_line = current_line + 1
+ end
+ end
+ return folds
+end
+
+---
+-- Creates and returns a new lexer with the given name.
+-- @param name The lexer's name.
+-- @param opts Table of lexer options. Options currently supported:
+-- * `lex_by_line`: Whether or not the lexer only processes whole lines of
+-- text (instead of arbitrary chunks of text) at a time.
+-- Line lexers cannot look ahead to subsequent lines.
+-- The default value is `false`.
+-- * `fold_by_indentation`: Whether or not fold points are calculated from
+-- changes in line indentation rather than from fold points defined by the
+-- lexer.
+-- The default value is `false`.
+-- * `case_insensitive_fold_points`: Whether or not fold points added via
+-- `lexer:add_fold_point()` ignore case.
+-- The default value is `false`.
+-- * `inherit`: Lexer to inherit from.
+-- The default value is `nil`.
+-- @usage l.new('rhtml', {inherit = l.load('html')})
+-- @name new
+function M.new(name, opts)
+ local lexer = {
+ _NAME = assert(name, 'lexer name expected'),
+ _LEXBYLINE = opts and opts['lex_by_line'],
+ _FOLDBYINDENTATION = opts and opts['fold_by_indentation'],
+ _CASEINSENSITIVEFOLDPOINTS = opts and opts['case_insensitive_fold_points'],
+ _lexer = opts and opts['inherit']
+ }
+
+ -- Create the initial maps for token names to style numbers and styles.
+ local token_styles = {}
+ for i = 1, #default do token_styles[default[i]] = i - 1 end
+ for i = 1, #predefined do token_styles[predefined[i]] = i + 31 end
+ lexer._TOKENSTYLES, lexer._numstyles = token_styles, #default
+ lexer._EXTRASTYLES = {}
+
+ return setmetatable(lexer, {__index = {
+ add_rule = M.add_rule, modify_rule = M.modify_rule, add_style = M.add_style,
+ add_fold_point = M.add_fold_point, join_tokens = join_tokens,
+ build_grammar = build_grammar, embed = M.embed, lex = M.lex, fold = M.fold
+ }})
+end
+
+-- Legacy support for older lexers.
+-- Processes the `lexer._rules`, `lexer._tokenstyles`, and `lexer._foldsymbols`
+-- tables.
+-- Since legacy lexers may be processed up to twice, ensure their default styles
+-- and rules are not processed more than once.
+local function process_legacy_lexer(lexer)
+ local function warn(msg) --[[io.stderr:write(msg, "\n")]] end
+ if not lexer._LEGACY then
+ lexer._LEGACY = true
+ warn("lexers as tables are deprecated; use 'lexer.new()'")
+ local token_styles = {}
+ for i = 1, #default do token_styles[default[i]] = i - 1 end
+ for i = 1, #predefined do token_styles[predefined[i]] = i + 31 end
+ lexer._TOKENSTYLES, lexer._numstyles = token_styles, #default
+ lexer._EXTRASTYLES = {}
+ setmetatable(lexer, getmetatable(M.new('')))
+ if lexer._rules then
+ warn("lexer '_rules' table is deprecated; use 'add_rule()'")
+ for i = 1, #lexer._rules do
+ lexer:add_rule(lexer._rules[i][1], lexer._rules[i][2])
+ end
+ end
+ end
+ if lexer._tokenstyles then
+ warn("lexer '_tokenstyles' table is deprecated; use 'add_style()'")
+ for token, style in pairs(lexer._tokenstyles) do
+ -- If this legacy lexer is being processed a second time, only add styles
+ -- added since the first processing.
+ if not lexer._TOKENSTYLES[token] then lexer:add_style(token, style) end
+ end
+ end
+ if lexer._foldsymbols then
+ warn("lexer '_foldsymbols' table is deprecated; use 'add_fold_point()'")
+ for token_name, symbols in pairs(lexer._foldsymbols) do
+ if type(symbols) == 'table' and token_name ~= '_patterns' then
+ for symbol, v in pairs(symbols) do
+ lexer:add_fold_point(token_name, symbol, v)
+ end
+ end
+ end
+ if lexer._foldsymbols._case_insensitive then
+ lexer._CASEINSENSITIVEFOLDPOINTS = true
+ end
+ end
+end
+
+local lexers = {} -- cache of loaded lexers
+---
+-- Initializes or loads and returns the lexer of string name *name*.
+-- Scintilla calls this function in order to load a lexer. Parent lexers also
+-- call this function in order to load child lexers and vice-versa. The user
+-- calls this function in order to load a lexer when using Scintillua as a Lua
+-- library.
+-- @param name The name of the lexing language.
+-- @param alt_name The alternate name of the lexing language. This is useful for
+-- embedding the same child lexer with multiple sets of start and end tokens.
+-- @param cache Flag indicating whether or not to load lexers from the cache.
+-- This should only be `true` when initially loading a lexer (e.g. not from
+-- within another lexer for embedding purposes).
+-- The default value is `false`.
+-- @return lexer object
+-- @name load
+function M.load(name, alt_name, cache)
+ if cache and lexers[alt_name or name] then return lexers[alt_name or name] end
+
+ -- When using Scintillua as a stand-alone module, the `property` and
+ -- `property_int` tables do not exist (they are not useful). Create them in
+  -- order to prevent errors from occurring.
+ if not M.property then
+ M.property, M.property_int = {}, setmetatable({}, {
+ __index = function(t, k) return tonumber(M.property[k]) or 0 end,
+ __newindex = function() error('read-only property') end
+ })
+ end
+
+ -- Load the language lexer with its rules, styles, etc.
+ -- However, replace the default `WHITESPACE` style name with a unique
+ -- whitespace style name (and then automatically add it afterwards), since
+ -- embedded lexing relies on these unique whitespace style names. Note that
+ -- loading embedded lexers changes `WHITESPACE` again, so when adding it
+ -- later, do not reference the potentially incorrect value.
+ M.WHITESPACE = (alt_name or name)..'_whitespace'
+ local lexer = dofile(assert(package.searchpath(name, M.path)))
+ assert(lexer, string.format("'%s.lua' did not return a lexer", name))
+ if alt_name then lexer._NAME = alt_name end
+ if not getmetatable(lexer) or lexer._LEGACY then
+ -- A legacy lexer may need to be processed a second time in order to pick up
+ -- any `_tokenstyles` or `_foldsymbols` added after `l.embed_lexer()`.
+ process_legacy_lexer(lexer)
+ if lexer._lexer and lexer._lexer._LEGACY then
+ process_legacy_lexer(lexer._lexer) -- mainly for `_foldsymbols` edits
+ end
+ end
+ lexer:add_style((alt_name or name)..'_whitespace', M.STYLE_WHITESPACE)
+
+ -- If the lexer is a proxy or a child that embedded itself, set the parent to
+ -- be the main lexer.
+ if lexer._lexer then lexer = lexer._lexer end
+
+ lexers[alt_name or name] = lexer
+ return lexer
+end
+
+-- The following are utility functions lexers will have access to.
+
+-- Common patterns.
+M.any = lpeg_P(1)
+M.ascii = lpeg_R('\000\127')
+M.extend = lpeg_R('\000\255')
+M.alpha = lpeg_R('AZ', 'az')
+M.digit = lpeg_R('09')
+M.alnum = lpeg_R('AZ', 'az', '09')
+M.lower = lpeg_R('az')
+M.upper = lpeg_R('AZ')
+M.xdigit = lpeg_R('09', 'AF', 'af')
+M.cntrl = lpeg_R('\000\031')
+M.graph = lpeg_R('!~')
+M.print = lpeg_R(' ~')
+M.punct = lpeg_R('!/', ':@', '[\'', '{~')
+M.space = lpeg_S('\t\v\f\n\r ')
+
+M.newline = lpeg_S('\r\n\f')^1
+M.nonnewline = 1 - M.newline
+M.nonnewline_esc = 1 - (M.newline + '\\') + '\\' * M.any
+
+M.dec_num = M.digit^1
+M.hex_num = '0' * lpeg_S('xX') * M.xdigit^1
+M.oct_num = '0' * lpeg_R('07')^1
+M.integer = lpeg_S('+-')^-1 * (M.hex_num + M.oct_num + M.dec_num)
+M.float = lpeg_S('+-')^-1 *
+ ((M.digit^0 * '.' * M.digit^1 + M.digit^1 * '.' * M.digit^0) *
+ (lpeg_S('eE') * lpeg_S('+-')^-1 * M.digit^1)^-1 +
+ (M.digit^1 * lpeg_S('eE') * lpeg_S('+-')^-1 * M.digit^1))
+
+M.word = (M.alpha + '_') * (M.alnum + '_')^0
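+
+-- These common patterns are typically wrapped in tokens, for example
+-- (an illustrative sketch, not part of this module):
+--
+--   local number = lexer.token(lexer.NUMBER, lexer.float + lexer.integer)
+--   local identifier = lexer.token(lexer.IDENTIFIER, lexer.word)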
+
+---
+-- Creates and returns a token pattern with token name *name* and pattern
+-- *patt*.
+-- If *name* is not a predefined token name, its style must be defined in the
+-- lexer's `_tokenstyles` table.
+-- @param name The name of the token. If this name is not a predefined token
+-- name, then a style needs to be associated with it in the lexer's
+-- `_tokenstyles` table.
+-- @param patt The LPeg pattern associated with the token.
+-- @return pattern
+-- @usage local ws = token(l.WHITESPACE, l.space^1)
+-- @usage local annotation = token('annotation', '@' * l.word)
+-- @name token
+function M.token(name, patt)
+ return lpeg_Cc(name) * patt * lpeg_Cp()
+end
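+
+-- Note that `token()` captures the token name with `lpeg.Cc()` and the
+-- position just past the match with `lpeg.Cp()`; this is why `lex()` returns a
+-- flat list of alternating names and positions.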
+
+---
+-- Creates and returns a pattern that matches a range of text bounded by
+-- *chars* characters.
+-- This is a convenience function for matching more complicated delimited ranges
+-- like strings with escape characters and balanced parentheses. *single_line*
+-- indicates whether or not the range must be on a single line, *no_escape*
+-- indicates whether or not to ignore '\' as an escape character, and *balanced*
+-- indicates whether or not to handle balanced ranges like parentheses and
+-- requires *chars* to be composed of two characters.
+-- @param chars The character(s) that bound the matched range.
+-- @param single_line Optional flag indicating whether or not the range must be
+-- on a single line.
+-- @param no_escape Optional flag indicating whether or not to ignore '\\' as
+-- an escape character (i.e. the range end character cannot be escaped).
+-- @param balanced Optional flag indicating whether or not to match a balanced
+-- range, like the "%b" Lua pattern. This flag only applies if *chars*
+-- consists of two different characters (e.g. "()").
+-- @return pattern
+-- @usage local dq_str_escapes = l.delimited_range('"')
+-- @usage local dq_str_noescapes = l.delimited_range('"', false, true)
+-- @usage local unbalanced_parens = l.delimited_range('()')
+-- @usage local balanced_parens = l.delimited_range('()', false, false, true)
+-- @see nested_pair
+-- @name delimited_range
+function M.delimited_range(chars, single_line, no_escape, balanced)
+ local s = chars:sub(1, 1)
+ local e = #chars == 2 and chars:sub(2, 2) or s
+ local range
+ local b = balanced and s or ''
+ local n = single_line and '\n' or ''
+ if no_escape then
+ local invalid = lpeg_S(e..n..b)
+ range = M.any - invalid
+ else
+ local invalid = lpeg_S(e..n..b) + '\\'
+ range = M.any - invalid + '\\' * M.any
+ end
+ if balanced and s ~= e then
+ return lpeg_P{s * (range + lpeg_V(1))^0 * e}
+ else
+ return s * range^0 * lpeg_P(e)^-1
+ end
+end
+
+---
+-- Creates and returns a pattern that matches pattern *patt* only at the
+-- beginning of a line.
+-- @param patt The LPeg pattern to match on the beginning of a line.
+-- @return pattern
+-- @usage local preproc = token(l.PREPROCESSOR, l.starts_line('#') *
+-- l.nonnewline^0)
+-- @name starts_line
+function M.starts_line(patt)
+ return lpeg_Cmt(lpeg_C(patt), function(input, index, match, ...)
+ local pos = index - #match
+ if pos == 1 then return index, ... end
+ local char = input:sub(pos - 1, pos - 1)
+ if char == '\n' or char == '\r' or char == '\f' then return index, ... end
+ end)
+end
+
+---
+-- Creates and returns a pattern that verifies that string set *s* contains the
+-- first non-whitespace character behind the current match position.
+-- @param s String character set like one passed to `lpeg.S()`.
+-- @return pattern
+-- @usage local regex = l.last_char_includes('+-*!%^&|=,([{') *
+-- l.delimited_range('/')
+-- @name last_char_includes
+function M.last_char_includes(s)
+ s = '['..s:gsub('[-%%%[]', '%%%1')..']'
+ return lpeg_P(function(input, index)
+ if index == 1 then return index end
+ local i = index
+ while input:sub(i - 1, i - 1):match('[ \t\r\n\f]') do i = i - 1 end
+ if input:sub(i - 1, i - 1):match(s) then return index end
+ end)
+end
+
+---
+-- Returns a pattern that matches a balanced range of text that starts with
+-- string *start_chars* and ends with string *end_chars*.
+-- With single-character delimiters, this function is identical to
+-- `delimited_range(start_chars..end_chars, false, true, true)`.
+-- @param start_chars The string starting a nested sequence.
+-- @param end_chars The string ending a nested sequence.
+-- @return pattern
+-- @usage local nested_comment = l.nested_pair('/*', '*/')
+-- @see delimited_range
+-- @name nested_pair
+function M.nested_pair(start_chars, end_chars)
+ local s, e = start_chars, lpeg_P(end_chars)^-1
+ return lpeg_P{s * (M.any - s - end_chars + lpeg_V(1))^0 * e}
+end
+
+---
+-- Creates and returns a pattern that matches any single word in string *words*.
+-- *case_insensitive* indicates whether or not to ignore case when matching
+-- words.
+-- This is a convenience function for simplifying a set of ordered choice word
+-- patterns.
+-- @param words A string list of words separated by spaces.
+-- @param case_insensitive Optional boolean flag indicating whether or not the
+-- word match is case-insensitive. The default value is `false`.
+-- @param word_chars Unused legacy parameter.
+-- @return pattern
+-- @usage local keyword = token(l.KEYWORD, word_match[[foo bar baz]])
+-- @usage local keyword = token(l.KEYWORD, word_match([[foo-bar foo-baz
+-- bar-foo bar-baz baz-foo baz-bar]], true))
+-- @name word_match
+function M.word_match(words, case_insensitive, word_chars)
+ local word_list = {}
+ if type(words) == 'table' then
+ -- Legacy `word_match(word_list, word_chars, case_insensitive)` form.
+ words = table.concat(words, ' ')
+ word_chars, case_insensitive = case_insensitive, word_chars
+ end
+ for word in words:gmatch('%S+') do
+ word_list[case_insensitive and word:lower() or word] = true
+ for char in word:gmatch('[^%w_]') do
+ if not (word_chars or ''):find(char, 1, true) then
+ word_chars = (word_chars or '')..char
+ end
+ end
+ end
+ local chars = M.alnum + '_'
+ if (word_chars or '') ~= '' then chars = chars + lpeg_S(word_chars) end
+ return lpeg_Cmt(chars^1, function(input, index, word)
+ if case_insensitive then word = word:lower() end
+ return word_list[word] and index or nil
+ end)
+end
+
+-- Deprecated legacy function. Use `parent:embed()` instead.
+-- Embeds child lexer *child* in parent lexer *parent* using patterns
+-- *start_rule* and *end_rule*, which signal the beginning and end of the
+-- embedded lexer, respectively.
+-- @param parent The parent lexer.
+-- @param child The child lexer.
+-- @param start_rule The pattern that signals the beginning of the embedded
+-- lexer.
+-- @param end_rule The pattern that signals the end of the embedded lexer.
+-- @usage l.embed_lexer(M, css, css_start_rule, css_end_rule)
+-- @usage l.embed_lexer(html, M, php_start_rule, php_end_rule)
+-- @usage l.embed_lexer(html, ruby, ruby_start_rule, ruby_end_rule)
+-- @see embed
+-- @name embed_lexer
+function M.embed_lexer(parent, child, start_rule, end_rule)
+ if not getmetatable(parent) then process_legacy_lexer(parent) end
+ if not getmetatable(child) then process_legacy_lexer(child) end
+ parent:embed(child, start_rule, end_rule)
+end
+
+-- Determines if the previous line is a comment.
+-- This is used for determining if the current comment line is a fold point.
+-- @param prefix The prefix string defining a comment.
+-- @param text The text passed to a fold function.
+-- @param pos The pos passed to a fold function.
+-- @param line The line passed to a fold function.
+-- @param s The s passed to a fold function.
+local function prev_line_is_comment(prefix, text, pos, line, s)
+ local start = line:find('%S')
+ if start < s and not line:find(prefix, start, true) then return false end
+ local p = pos - 1
+ if text:sub(p, p) == '\n' then
+ p = p - 1
+ if text:sub(p, p) == '\r' then p = p - 1 end
+ if text:sub(p, p) ~= '\n' then
+ while p > 1 and text:sub(p - 1, p - 1) ~= '\n' do p = p - 1 end
+ while text:sub(p, p):find('^[\t ]$') do p = p + 1 end
+ return text:sub(p, p + #prefix - 1) == prefix
+ end
+ end
+ return false
+end
+
+-- Determines if the next line is a comment.
+-- This is used for determining if the current comment line is a fold point.
+-- @param prefix The prefix string defining a comment.
+-- @param text The text passed to a fold function.
+-- @param pos The pos passed to a fold function.
+-- @param line The line passed to a fold function.
+-- @param s The s passed to a fold function.
+local function next_line_is_comment(prefix, text, pos, line, s)
+ local p = text:find('\n', pos + s)
+ if p then
+ p = p + 1
+ while text:sub(p, p):find('^[\t ]$') do p = p + 1 end
+ return text:sub(p, p + #prefix - 1) == prefix
+ end
+ return false
+end
+
+---
+-- Returns a fold function (to be passed to `lexer:add_fold_point()`) that folds
+-- consecutive line comments that start with string *prefix*.
+-- @param prefix The prefix string defining a line comment.
+-- @usage lexer:add_fold_point(l.COMMENT, '--', l.fold_line_comments('--'))
+-- @usage lexer:add_fold_point(l.COMMENT, '//', l.fold_line_comments('//'))
+-- @name fold_line_comments
+function M.fold_line_comments(prefix)
+ local property_int = M.property_int
+ return function(text, pos, line, s)
+ if property_int['fold.line.comments'] == 0 then return 0 end
+ if s > 1 and line:match('^%s*()') < s then return 0 end
+ local prev_line_comment = prev_line_is_comment(prefix, text, pos, line, s)
+ local next_line_comment = next_line_is_comment(prefix, text, pos, line, s)
+ if not prev_line_comment and next_line_comment then return 1 end
+ if prev_line_comment and not next_line_comment then return -1 end
+ return 0
+ end
+end
+
+M.property_expanded = setmetatable({}, {
+ -- Returns the string property value associated with string property *key*,
+ -- replacing any "$()" and "%()" expressions with the values of their keys.
+ __index = function(t, key)
+ return M.property[key]:gsub('[$%%]%b()', function(key)
+ return t[key:sub(3, -2)]
+ end)
+ end,
+ __newindex = function() error('read-only property') end
+})
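+
+-- For example (illustrative property values): if `property['style.string']` is
+-- 'fore:$(color.teal)' and `property['color.teal']` is '#008080', then
+-- `property_expanded['style.string']` evaluates to 'fore:#008080'.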
+
+--[[ The functions and fields below were defined in C.
+
+---
+-- Returns the line number of the line that contains position *pos*, which
+-- starts from 1.
+-- @param pos The position to get the line number of.
+-- @return number
+local function line_from_position(pos) end
+]]
+
+return M
diff --git a/lexlua/lilypond.lua b/lexlua/lilypond.lua
new file mode 100644
index 000000000..6f68e8249
--- /dev/null
+++ b/lexlua/lilypond.lua
@@ -0,0 +1,31 @@
+-- Copyright 2006-2018 Robert Gieseke. See License.txt.
+-- Lilypond LPeg lexer.
+-- TODO: Embed Scheme; Notes? Numbers?
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('lilypond')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords, commands.
+lex:add_rule('keyword', token(lexer.KEYWORD, '\\' * lexer.word))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING,
+ lexer.delimited_range('"', false, true)))
+
+-- Comments.
+-- TODO: block comment.
+lex:add_rule('comment', token(lexer.COMMENT, '%' * lexer.nonnewline^0))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S("{}'~<>|")))
+
+return lex
diff --git a/lexlua/lisp.lua b/lexlua/lisp.lua
new file mode 100644
index 000000000..bf74fc15c
--- /dev/null
+++ b/lexlua/lisp.lua
@@ -0,0 +1,65 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Lisp LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('lisp')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ defclass defconstant defgeneric define-compiler-macro define-condition
+ define-method-combination define-modify-macro define-setf-expander
+ define-symbol-macro defmacro defmethod defpackage defparameter defsetf
+ defstruct deftype defun defvar
+ abort assert block break case catch ccase cerror cond ctypecase declaim
+ declare do do* do-all-symbols do-external-symbols do-symbols dolist dotimes
+ ecase error etypecase eval-when flet handler-bind handler-case if
+ ignore-errors in-package labels lambda let let* locally loop macrolet
+ multiple-value-bind proclaim prog prog* prog1 prog2 progn progv provide
+ require restart-bind restart-case restart-name return return-from signal
+ symbol-macrolet tagbody the throw typecase unless unwind-protect when
+ with-accessors with-compilation-unit with-condition-restarts
+ with-hash-table-iterator with-input-from-string with-open-file
+ with-open-stream with-output-to-string with-package-iterator
+ with-simple-restart with-slots with-standard-io-syntax
+ t nil
+]]))
+
+local word = lexer.alpha * (lexer.alnum + '_' + '-')^0
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, "'" * word +
+ lexer.delimited_range('"')))
+
+-- Comments.
+local line_comment = ';' * lexer.nonnewline^0
+local block_comment = '#|' * (lexer.any - '|#')^0 * P('|#')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, P('-')^-1 * lexer.digit^1 *
+ (S('./') * lexer.digit^1)^-1))
+
+-- Entities.
+lex:add_rule('entity', token('entity', '&' * word))
+lex:add_style('entity', lexer.STYLE_VARIABLE)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('<>=*/+-`@%()')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#|', '|#')
+lex:add_fold_point(lexer.COMMENT, ';', lexer.fold_line_comments(';'))
+
+return lex
diff --git a/lexlua/litcoffee.lua b/lexlua/litcoffee.lua
new file mode 100644
index 000000000..d122ae2db
--- /dev/null
+++ b/lexlua/litcoffee.lua
@@ -0,0 +1,22 @@
+-- Copyright 2006-2018 Robert Gieseke. See License.txt.
+-- Literate CoffeeScript LPeg lexer.
+-- http://coffeescript.org/#literate
+
+local lexer = require('lexer')
+local token = lexer.token
+local P, S = lpeg.P, lpeg.S
+
+local lex = lexer.new('litcoffee', {inherit = lexer.load('markdown')})
+
+-- Embedded CoffeeScript.
+local coffeescript = lexer.load('coffeescript')
+local coffee_start_rule = token(lexer.STYLE_EMBEDDED, (P(' ')^4 + P('\t')))
+local coffee_end_rule = token(lexer.STYLE_EMBEDDED, lexer.newline)
+lex:embed(coffeescript, coffee_start_rule, coffee_end_rule)
+
+-- Use 'markdown_whitespace' instead of lexer.WHITESPACE since the latter would
+-- expand to 'litcoffee_whitespace'.
+lex:modify_rule('whitespace', token('markdown_whitespace', S(' \t')^1 +
+ S('\r\n')^1))
+
+return lex
diff --git a/lexlua/logtalk.lua b/lexlua/logtalk.lua
new file mode 100644
index 000000000..c79429426
--- /dev/null
+++ b/lexlua/logtalk.lua
@@ -0,0 +1,35 @@
+-- Copyright © 2017-2018 Michael T. Richter <ttmrichter@gmail.com>. See License.txt.
+-- Logtalk LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('logtalk', {inherit = lexer.load('prolog')})
+
+lex:modify_rule('keyword', token(lexer.KEYWORD, word_match[[
+ -- Logtalk "keywords" generated from Vim syntax highlighting file with Prolog
+  -- keywords stripped since we're building on top of the Prolog lexer.
+ abolish_category abolish_events abolish_object abolish_protocol after alias as
+ before built_in calls category category_property coinductive complements
+ complements_object conforms_to_protocol create create_category create_object
+ create_protocol create_logtalk_flag current current_category current_event
+ current_logtalk_flag current_object current_protocol define_events encoding
+ end_category end_class end_object end_protocol extends extends_category
+ extends_object extends_protocol forward implements implements_protocol imports
+ imports_category include info instantiates instantiates_class is
+ logtalk_compile logtalk_library_path logtalk_load logtalk_load_context
+ logtalk_make meta_non_terminal mode object object_property parameter private
+ protected protocol_property self sender set_logtalk_flag specializes
+ specializes_class synchronized this threaded threaded_call threaded_engine
+ threaded_engine_create threaded_engine_destroy threaded_engine_fetch
+ threaded_engine_next threaded_engine_next_reified threaded_engine_post
+ threaded_engine_self threaded_engine_yield threaded_exit threaded_ignore
+ threaded_notify threaded_once threaded_peek threaded_wait uses
+ -- info/1 and info/2 predicates have their own keywords manually extracted
+ -- from documentation.
+ comment argnames arguments author version date parameters parnames copyright
+ license remarks see_also
+]]) + lex:get_rule('keyword'))
+
+return lex
diff --git a/lexlua/lua.lua b/lexlua/lua.lua
new file mode 100644
index 000000000..136b618da
--- /dev/null
+++ b/lexlua/lua.lua
@@ -0,0 +1,159 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Lua LPeg lexer.
+-- Original written by Peter Odding, 2007/04/04.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('lua')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ and break do else elseif end false for function if in local nil not or repeat
+ return then true until while
+ -- Added in 5.2.
+ goto
+]]))
+
+-- Functions and deprecated functions.
+local func = token(lexer.FUNCTION, word_match[[
+ assert collectgarbage dofile error getmetatable ipairs load loadfile next
+ pairs pcall print rawequal rawget rawset require select setmetatable tonumber
+ tostring type xpcall
+ -- Added in 5.2.
+ rawlen
+]])
+local deprecated_func = token('deprecated_function', word_match[[
+ -- Deprecated in 5.2.
+ getfenv loadstring module setfenv unpack
+]])
+lex:add_rule('function', func + deprecated_func)
+lex:add_style('deprecated_function', lexer.STYLE_FUNCTION..',italics')
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ _G _VERSION
+ -- Added in 5.2.
+ _ENV
+]]))
+
+-- Libraries and deprecated libraries.
+local library = token('library', word_match[[
+ -- Coroutine.
+ coroutine coroutine.create coroutine.resume coroutine.running coroutine.status
+ coroutine.wrap coroutine.yield
+ -- Coroutine added in 5.3.
+ coroutine.isyieldable
+ -- Module.
+ package package.cpath package.loaded package.loadlib package.path
+ package.preload
+ -- Module added in 5.2.
+ package.config package.searchers package.searchpath
+ -- UTF-8 added in 5.3.
+ utf8 utf8.char utf8.charpattern utf8.codepoint utf8.codes utf8.len utf8.offset
+ -- String.
+ string string.byte string.char string.dump string.find string.format
+ string.gmatch string.gsub string.len string.lower string.match string.rep
+ string.reverse string.sub string.upper
+ -- String added in 5.3.
+ string.pack string.packsize string.unpack
+ -- Table.
+ table table.concat table.insert table.remove table.sort
+ -- Table added in 5.2.
+ table.pack table.unpack
+ -- Table added in 5.3.
+ table.move
+ -- Math.
+ math math.abs math.acos math.asin math.atan math.ceil math.cos math.deg
+ math.exp math.floor math.fmod math.huge math.log math.max math.min math.modf
+ math.pi math.rad math.random math.randomseed math.sin math.sqrt math.tan
+ -- Math added in 5.3.
+ math.maxinteger math.mininteger math.tointeger math.type math.ult
+ -- IO.
+ io io.close io.flush io.input io.lines io.open io.output io.popen io.read
+ io.stderr io.stdin io.stdout io.tmpfile io.type io.write
+ -- OS.
+ os os.clock os.date os.difftime os.execute os.exit os.getenv os.remove
+ os.rename os.setlocale os.time os.tmpname
+ -- Debug.
+ debug debug.debug debug.gethook debug.getinfo debug.getlocal
+ debug.getmetatable debug.getregistry debug.getupvalue debug.sethook
+ debug.setlocal debug.setmetatable debug.setupvalue debug.traceback
+ -- Debug added in 5.2.
+ debug.getuservalue debug.setuservalue debug.upvalueid debug.upvaluejoin
+]])
+local deprecated_library = token('deprecated_library', word_match[[
+ -- Module deprecated in 5.2.
+ package.loaders package.seeall
+ -- Table deprecated in 5.2.
+ table.maxn
+ -- Math deprecated in 5.2.
+ math.log10
+ -- Math deprecated in 5.3.
+ math.atan2 math.cosh math.frexp math.ldexp math.pow math.sinh math.tanh
+ -- Bit32 deprecated in 5.3.
+ bit32 bit32.arshift bit32.band bit32.bnot bit32.bor bit32.btest bit32.extract
+ bit32.lrotate bit32.lshift bit32.replace bit32.rrotate bit32.rshift bit32.xor
+ -- Debug deprecated in 5.2.
+ debug.getfenv debug.setfenv
+]])
+lex:add_rule('library', library + deprecated_library)
+lex:add_style('library', lexer.STYLE_TYPE)
+lex:add_style('deprecated_library', lexer.STYLE_TYPE..',italics')
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+local longstring = lpeg.Cmt('[' * lpeg.C(P('=')^0) * '[',
+ function(input, index, eq)
+ local _, e = input:find(']'..eq..']', index, true)
+ return (e or #input) + 1
+ end)
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')) +
+ token('longstring', longstring))
+lex:add_style('longstring', lexer.STYLE_STRING)
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '--' * (longstring +
+ lexer.nonnewline^0)))
+
+-- Numbers.
+local lua_integer = P('-')^-1 * (lexer.hex_num + lexer.dec_num)
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lua_integer))
+
+-- Labels.
+lex:add_rule('label', token(lexer.LABEL, '::' * lexer.word * '::'))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, '..' +
+ S('+-*/%^#=<>&|~;:,.{}[]()')))
+
+-- Fold points.
+local function fold_longcomment(text, pos, line, s, symbol)
+ if symbol == '[' then
+ if line:find('^%[=*%[', s) then return 1 end
+ elseif symbol == ']' then
+ if line:find('^%]=*%]', s) then return -1 end
+ end
+ return 0
+end
+lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'do', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'function', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'repeat', 'until')
+lex:add_fold_point(lexer.COMMENT, '[', fold_longcomment)
+lex:add_fold_point(lexer.COMMENT, ']', fold_longcomment)
+lex:add_fold_point(lexer.COMMENT, '--', lexer.fold_line_comments('--'))
+lex:add_fold_point('longstring', '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+
+return lex
diff --git a/lexlua/makefile.lua b/lexlua/makefile.lua
new file mode 100644
index 000000000..061424615
--- /dev/null
+++ b/lexlua/makefile.lua
@@ -0,0 +1,90 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Makefile LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('makefile', {lex_by_line = true})
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+lex:add_rule('whitespace', ws)
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, P('!')^-1 * word_match([[
+ -- GNU Make conditionals.
+ ifeq ifneq ifdef ifndef else endif
+ -- Other conditionals.
+ if elseif elseifdef elseifndef
+ -- Directives and other keywords.
+ define endef export include override private undefine unexport vpath
+]], true)))
+
+-- Targets.
+local special_target = token(lexer.CONSTANT, word_match[[
+ .PHONY .SUFFIXES .DEFAULT .PRECIOUS .INTERMEDIATE .SECONDARY .SECONDEXPANSION
+ .DELETE_ON_ERROR .IGNORE .LOW_RESOLUTION_TIME .SILENT .EXPORT_ALL_VARIABLES
+ .NOTPARALLEL .ONESHELL .POSIX
+]])
+local normal_target = token('target', (lexer.any - lexer.space - S(':#='))^1)
+lex:add_rule('target', lexer.starts_line((special_target + normal_target) *
+ ws^0 * #(':' * -P('='))))
+lex:add_style('target', lexer.STYLE_LABEL)
+
+-- Variables.
+local word_char = lexer.any - lexer.space - S(':#=(){}')
+local assign = S(':+?')^-1 * '='
+local expanded_var = '$' * ('(' * word_char^1 * ')' + '{' * word_char^1 * '}')
+local auto_var = '$' * S('@%<?^+|*')
+local special_var = word_match[[
+ MAKEFILE_LIST .DEFAULT_GOAL MAKE_RESTARTS .RECIPEPREFIX .VARIABLES .FEATURES
+ .INCLUDE_DIRS GPATH MAKECMDGOALS MAKESHELL SHELL VPATH
+]] * #(ws^0 * assign)
+local implicit_var = word_match[[
+ -- Some common variables.
+ AR AS CC CXX CPP FC M2C PC CO GET LEX YACC LINT MAKEINFO TEX TEXI2DVI WEAVE
+ CWEAVE TANGLE CTANGLE RM
+ -- Some common flag variables.
+ ARFLAGS ASFLAGS CFLAGS CXXFLAGS COFLAGS CPPFLAGS FFLAGS GFLAGS LDFLAGS LFLAGS
+ YFLAGS PFLAGS RFLAGS LINTFLAGS
+ -- Other.
+ DESTDIR MAKE MAKEFLAGS MAKEOVERRIDES MFLAGS
+]] * #(ws^0 * assign)
+local computed_var = token(lexer.OPERATOR, '$' * S('({')) *
+ token(lexer.FUNCTION, word_match[[
+ -- Functions for String Substitution and Analysis.
+ subst patsubst strip findstring filter filter-out sort word wordlist words
+ firstword lastword
+ -- Functions for File Names.
+ dir notdir suffix basename addsuffix addprefix join wildcard realpath abspath
+ -- Functions for Conditionals.
+ if or and
+ -- Miscellaneous Functions.
+ foreach call value eval origin flavor shell
+ -- Functions That Control Make.
+ error warning info
+]])
+local variable = token(lexer.VARIABLE, expanded_var + auto_var + special_var +
+ implicit_var) + computed_var
+lex:add_rule('variable', variable)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, assign + S(':$(){}')))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, word_char^1))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Embedded Bash.
+local bash = lexer.load('bash')
+bash:modify_rule('variable', token(lexer.VARIABLE, '$$' * word_char^1) +
+ bash:get_rule('variable') + variable)
+local bash_start_rule = token(lexer.WHITESPACE, P('\t')) +
+ token(lexer.OPERATOR, P(';'))
+local bash_end_rule = token(lexer.WHITESPACE, P('\n'))
+lex:embed(bash, bash_start_rule, bash_end_rule)
+
+return lex
diff --git a/lexlua/man.lua b/lexlua/man.lua
new file mode 100644
index 000000000..c5f8d5a51
--- /dev/null
+++ b/lexlua/man.lua
@@ -0,0 +1,29 @@
+-- Copyright 2015-2018 David B. Lamkins <david@lamkins.net>. See License.txt.
+-- man/roff LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('man')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Markup.
+lex:add_rule('rule1', token(lexer.STRING, '.' * ('B' * P('R')^-1 +
+ 'I' * P('PR')^-1) *
+ lexer.nonnewline^0))
+lex:add_rule('rule2', token(lexer.NUMBER, '.' * S('ST') * 'H' *
+ lexer.nonnewline^0))
+lex:add_rule('rule3', token(lexer.KEYWORD, P('.br') + '.DS' + '.RS' + '.RE' +
+ '.PD'))
+lex:add_rule('rule4', token(lexer.LABEL, '.' * (S('ST') * 'H' + '.TP')))
+lex:add_rule('rule5', token(lexer.VARIABLE, '.B' * P('R')^-1 +
+ '.I' * S('PR')^-1 +
+ '.PP'))
+lex:add_rule('rule6', token(lexer.TYPE, '\\f' * S('BIPR')))
+lex:add_rule('rule7', token(lexer.PREPROCESSOR, lexer.starts_line('.') *
+ lexer.alpha^1))
+
+return lex
diff --git a/lexlua/markdown.lua b/lexlua/markdown.lua
new file mode 100644
index 000000000..2622a11d7
--- /dev/null
+++ b/lexlua/markdown.lua
@@ -0,0 +1,102 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Markdown LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('markdown')
+
+-- Block elements.
+lex:add_rule('header',
+ token('h6', lexer.starts_line('######') * lexer.nonnewline^0) +
+ token('h5', lexer.starts_line('#####') * lexer.nonnewline^0) +
+ token('h4', lexer.starts_line('####') * lexer.nonnewline^0) +
+ token('h3', lexer.starts_line('###') * lexer.nonnewline^0) +
+ token('h2', lexer.starts_line('##') * lexer.nonnewline^0) +
+ token('h1', lexer.starts_line('#') * lexer.nonnewline^0))
+local font_size = lexer.property_int['fontsize'] > 0 and
+ lexer.property_int['fontsize'] or 10
+local hstyle = 'fore:$(color.red)'
+lex:add_style('h6', hstyle)
+lex:add_style('h5', hstyle..',size:'..(font_size + 1))
+lex:add_style('h4', hstyle..',size:'..(font_size + 2))
+lex:add_style('h3', hstyle..',size:'..(font_size + 3))
+lex:add_style('h2', hstyle..',size:'..(font_size + 4))
+lex:add_style('h1', hstyle..',size:'..(font_size + 5))
+
+lex:add_rule('blockquote',
+ token(lexer.STRING,
+ lpeg.Cmt(lexer.starts_line(S(' \t')^0 * '>'),
+ function(input, index)
+ local _, e = input:find('\n[ \t]*\r?\n', index)
+ return (e or #input) + 1
+ end)))
+
+lex:add_rule('blockcode',
+ token('code', lexer.starts_line(P(' ')^4 + P('\t')) * -P('<') *
+ lexer.nonnewline^0 * lexer.newline^-1))
+lex:add_style('code', lexer.STYLE_EMBEDDED..',eolfilled')
+
+lex:add_rule('hr',
+ token('hr',
+ lpeg.Cmt(lexer.starts_line(S(' \t')^0 * lpeg.C(S('*-_'))),
+ function(input, index, c)
+ local line = input:match('[^\n]*', index)
+ line = line:gsub('[ \t]', '')
+ if line:find('[^'..c..']') or #line < 2 then
+ return nil
+ end
+ return (input:find('\n', index) or #input) + 1
+ end)))
+lex:add_style('hr', 'back:$(color.black),eolfilled')
+
+lex:add_rule('list', token('list', lexer.starts_line(S(' \t')^0 * (S('*+-') +
+ R('09')^1 * '.')) *
+ S(' \t')))
+lex:add_style('list', lexer.STYLE_CONSTANT)
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, S(' \t')^1 + S('\v\r\n')^1)
+lex:add_rule('whitespace', ws)
+
+-- Span elements.
+lex:add_rule('escape', token(lexer.DEFAULT, P('\\') * 1))
+
+lex:add_rule('link_label',
+ token('link_label', lexer.delimited_range('[]') * ':') * ws *
+ token('link_url', (lexer.any - lexer.space)^1) *
+ (ws * token(lexer.STRING, lexer.delimited_range('"', false, true) +
+ lexer.delimited_range("'", false, true) +
+ lexer.delimited_range('()')))^-1)
+lex:add_style('link_label', lexer.STYLE_LABEL)
+lex:add_style('link_url', 'underlined')
+
+lex:add_rule('link',
+ token('link', P('!')^-1 * lexer.delimited_range('[]') *
+ (P('(') * (lexer.any - S(') \t'))^0 *
+ (S(' \t')^1 *
+ lexer.delimited_range('"', false, true))^-1 * ')' +
+ S(' \t')^0 * lexer.delimited_range('[]')) +
+ P('http://') * (lexer.any - lexer.space)^1))
+lex:add_style('link', 'underlined')
+
+lex:add_rule('strong', token('strong', P('**') * (lexer.any - '**')^0 *
+ P('**')^-1 +
+ P('__') * (lexer.any - '__')^0 *
+ P('__')^-1))
+lex:add_style('strong', 'bold')
+lex:add_rule('em', token('em', lexer.delimited_range('*', true) +
+ lexer.delimited_range('_', true)))
+lex:add_style('em', 'italics')
+lex:add_rule('code', token('code', P('``') * (lexer.any - '``')^0 * P('``')^-1 +
+ lexer.delimited_range('`', true, true)))
+
+-- Embedded HTML.
+local html = lexer.load('html')
+local start_rule = lexer.starts_line(S(' \t')^0) * #P('<') *
+ html:get_rule('element')
+local end_rule = token(lexer.DEFAULT, P('\n')) -- TODO: lexer.WHITESPACE errors
+lex:embed(html, start_rule, end_rule)
+
+return lex
diff --git a/lexlua/matlab.lua b/lexlua/matlab.lua
new file mode 100644
index 000000000..2c576556f
--- /dev/null
+++ b/lexlua/matlab.lua
@@ -0,0 +1,86 @@
+-- Copyright 2006-2018 Martin Morawetz. See License.txt.
+-- Matlab LPeg lexer.
+-- Based on lexer code by Mitchell.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('matlab')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ break case catch continue do else elseif end end_try_catch end_unwind_protect
+ endfor endif endswitch endwhile for function endfunction global if otherwise
+ persistent replot return static switch try until unwind_protect
+ unwind_protect_cleanup varargin varargout while
+]], true)))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+  abs any argv atan2 axes axis ceil cla clear clf columns cos delete diff disp
+ doc double drawnow exp figure find fix floor fprintf gca gcf get grid help
+ hist hold isempty isnull length load log log10 loglog max mean median min mod
+ ndims numel num2str ones pause plot printf quit rand randn rectangle rem
+ repmat reshape round rows save semilogx semilogy set sign sin size sizeof
+ size_equal sort sprintf squeeze sqrt std strcmp subplot sum tan tic title toc
+ uicontrol who xlabel ylabel zeros
+]]))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ EDITOR I IMAGEPATH INFO_FILE J LOADPATH OCTAVE_VERSION PAGER PS1 PS2 PS4 PWD
+]]))
+
+-- Variable.
+lex:add_rule('variable', token(lexer.VARIABLE, word_match[[
+ ans automatic_replot default_return_value do_fortran_indexing
+ define_all_return_values empty_list_elements_ok eps false gnuplot_binary
+ ignore_function_time_stamp implicit_str_to_num_ok Inf inf NaN nan
+ ok_to_lose_imaginary_part output_max_field_width output_precision
+ page_screen_output pi prefer_column_vectors prefer_zero_one_indexing
+ print_answer_id_name print_empty_dimensions realmax realmin
+ resize_on_range_error return_last_computed_value save_precision
+ silent_functions split_long_rows suppress_verbose_help_message
+ treat_neg_dim_as_zero true warn_assign_as_truth_value
+ warn_comma_in_global_decl warn_divide_by_zero warn_function_name_clash
+ whitespace_in_literal_matrix
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"') +
+ lexer.delimited_range('`')))
+
+-- Comments.
+local line_comment = (P('%') + '#') * lexer.nonnewline^0
+local block_comment = '%{' * (lexer.any - '%}')^0 * P('%}')^-1
+lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer +
+ lexer.dec_num + lexer.hex_num +
+ lexer.oct_num))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR,
+ S('!%^&*()[]{}-=+/\\|:;.,?<>~`´')))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'for', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'while', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'switch', 'end')
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.COMMENT, '%{', '%}')
+lex:add_fold_point(lexer.COMMENT, '%', lexer.fold_line_comments('%'))
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
diff --git a/lexlua/moonscript.lua b/lexlua/moonscript.lua
new file mode 100644
index 000000000..49a98d25f
--- /dev/null
+++ b/lexlua/moonscript.lua
@@ -0,0 +1,141 @@
+-- Copyright 2016-2018 Alejandro Baez (https://keybase.io/baez). See License.txt.
+-- Moonscript LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, S, R = lpeg.P, lpeg.S, lpeg.R
+
+local lex = lexer.new('moonscript', {fold_by_indentation = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ -- Lua.
+ and break do else elseif false for if in local nil not or return then true
+ while
+ -- Moonscript.
+ continue class export extends from import super switch unless using when with
+]]))
+
+-- Error words.
+lex:add_rule('error', token(lexer.ERROR, word_match[[function end]]))
+
+-- Self reference.
+lex:add_rule('self_ref', token('self_ref', '@' * lexer.word + 'self'))
+lex:add_style('self_ref', lexer.STYLE_LABEL)
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ assert collectgarbage dofile error getmetatable ipairs load loadfile next
+ pairs pcall print rawequal rawget rawset require select setmetatable tonumber
+ tostring type xpcall
+ -- Added in 5.2.
+ rawlen
+]]))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ _G _VERSION
+ -- Added in 5.2.
+ _ENV
+]]))
+
+-- Libraries.
+lex:add_rule('library', token('library', word_match[[
+ -- Coroutine.
+ coroutine coroutine.create coroutine.resume coroutine.running coroutine.status
+ coroutine.wrap coroutine.yield
+ -- Coroutine added in 5.3.
+ coroutine.isyieldable
+ -- Module.
+ package package.cpath package.loaded package.loadlib package.path
+ package.preload
+ -- Module added in 5.2.
+ package.config package.searchers package.searchpath
+ -- UTF-8 added in 5.3.
+ utf8 utf8.char utf8.charpattern utf8.codepoint utf8.codes utf8.len utf8.offset
+ -- String.
+ string string.byte string.char string.dump string.find string.format
+ string.gmatch string.gsub string.len string.lower string.match string.rep
+ string.reverse string.sub string.upper
+ -- String added in 5.3.
+ string.pack string.packsize string.unpack
+ -- Table.
+ table table.concat table.insert table.remove table.sort
+ -- Table added in 5.2.
+ table.pack table.unpack
+ -- Table added in 5.3.
+ table.move
+ -- Math.
+ math math.abs math.acos math.asin math.atan math.ceil math.cos math.deg
+ math.exp math.floor math.fmod math.huge math.log math.max math.min math.modf
+ math.pi math.rad math.random math.randomseed math.sin math.sqrt math.tan
+ -- Math added in 5.3.
+ math.maxinteger math.mininteger math.tointeger math.type math.ult
+ -- IO.
+ io io.close io.flush io.input io.lines io.open io.output io.popen io.read
+ io.stderr io.stdin io.stdout io.tmpfile io.type io.write
+ -- OS.
+ os os.clock os.date os.difftime os.execute os.exit os.getenv os.remove
+ os.rename os.setlocale os.time os.tmpname
+ -- Debug.
+ debug debug.debug debug.gethook debug.getinfo debug.getlocal
+ debug.getmetatable debug.getregistry debug.getupvalue debug.sethook
+ debug.setlocal debug.setmetatable debug.setupvalue debug.traceback
+ -- Debug added in 5.2.
+ debug.getuservalue debug.setuservalue debug.upvalueid debug.upvaluejoin
+
+ --- MoonScript 0.3.1 standard library.
+ -- Printing functions.
+ p
+ -- Table functions.
+ run_with_scope defaultbl extend copy
+ -- Class/object functions.
+ is_object bind_methods mixin mixin_object mixin_table
+ -- Misc functions.
+ fold
+ -- Debug functions.
+ debug.upvalue
+]]))
+lex:add_style('library', lexer.STYLE_TYPE)
+
+-- Identifiers.
+local identifier = token(lexer.IDENTIFIER, lexer.word)
+local proper_ident = token('proper_ident', R('AZ') * lexer.word)
+local tbl_key = token('tbl_key', lexer.word * ':' + ':' * lexer.word )
+lex:add_rule('identifier', tbl_key + proper_ident + identifier)
+lex:add_style('proper_ident', lexer.STYLE_CLASS)
+lex:add_style('tbl_key', lexer.STYLE_REGEX)
+
+local longstring = lpeg.Cmt('[' * lpeg.C(P('=')^0) * '[',
+ function(input, index, eq)
+ local _, e = input:find(']'..eq..']', index, true)
+ return (e or #input) + 1
+ end)
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", false, true)
+local dq_str = lexer.delimited_range('"', false, true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str) +
+ token('longstring', longstring))
+lex:add_style('longstring', lexer.STYLE_STRING)
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '--' * (longstring +
+ lexer.nonnewline^0)))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Function definition.
+lex:add_rule('fndef', token('fndef', P('->') + '=>'))
+lex:add_style('fndef', lexer.STYLE_PREPROCESSOR)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-*!\\/%^#=<>;:,.')))
+lex:add_rule('symbol', token('symbol', S('(){}[]')))
+lex:add_style('symbol', lexer.STYLE_EMBEDDED)
+
+return lex
diff --git a/lexlua/mumps.lua b/lexlua/mumps.lua
new file mode 100644
index 000000000..8a7d7d8f1
--- /dev/null
+++ b/lexlua/mumps.lua
@@ -0,0 +1,112 @@
+-- Copyright 2015-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- MUMPS (M) LPeg lexer.
+
+local l = require('lexer')
+local token, word_match = l.token, l.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local M = {_NAME = 'mumps'}
+
+-- Whitespace.
+local ws = token(l.WHITESPACE, l.space^1)
+
+-- Comments.
+local comment = token(l.COMMENT, ';' * l.nonnewline_esc^0)
+
+-- Strings.
+local string = token(l.STRING, l.delimited_range('"', true))
+
+-- Numbers.
+local number = token(l.NUMBER, l.float + l.integer) -- TODO: float?
+
+-- Keywords.
+local keyword = token(l.KEYWORD, word_match({
+ -- Abbreviations.
+ 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'q',
+ 'r', 's', 'u', 'v', 'w', 'x',
+ -- Full.
+ 'break', 'close', 'do', 'else', 'for', 'goto', 'halt', 'hang', 'if', 'job',
+ 'kill', 'lock', 'merge', 'new', 'open', 'quit', 'read', 'set', 'use', 'view',
+ 'write', 'xecute',
+ -- Cache- or GTM-specific.
+ 'catch', 'continue', 'elseif', 'tcommit', 'throw', 'trollback', 'try',
+ 'tstart', 'while',
+}, nil, true))
+
+-- Functions.
+local func = token(l.FUNCTION, '$' * word_match({
+ -- Abbreviations.
+ 'a', 'c', 'd', 'e', 'f', 'fn', 'g', 'j', 'l', 'n', 'na', 'o', 'p', 'q', 'ql',
+ 'qs', 'r', 're', 's', 'st', 't', 'tr', 'v',
+ -- Full.
+ 'ascii', 'char', 'data', 'extract', 'find', 'fnumber', 'get', 'justify',
+ 'length', 'next', 'name', 'order', 'piece', 'query', 'qlength', 'qsubscript',
+ 'random', 'reverse', 'select', 'stack', 'text', 'translate', 'view',
+ -- Z function abbreviations.
+ 'zd', 'zdh', 'zdt', 'zdth', 'zh', 'zt', 'zth', 'zu', 'zp',
+ -- Z functions.
+ 'zabs', 'zarccos', 'zarcsin', 'zarctan', 'zcos', 'zcot', 'zcsc', 'zdate',
+ 'zdateh', 'zdatetime', 'zdatetimeh', 'zexp', 'zhex', 'zln', 'zlog', 'zpower',
+ 'zsec', 'zsin', 'zsqr', 'ztan', 'ztime', 'ztimeh', 'zutil', 'zf', 'zprevious',
+ -- Cache- or GTM-specific.
+ 'bit', 'bitcount', 'bitfind', 'bitlogic', 'case', 'classmethod', 'classname',
+ 'decimal', 'double', 'factor', 'i', 'increment', 'inumber', 'isobject',
+ 'isvaliddouble', 'isvalidnum', 'li', 'list', 'lb', 'listbuild', 'ld',
+ 'listdata', 'lf', 'listfind', 'lfs', 'listfromstring', 'lg', 'listget', 'll',
+ 'listlength', 'listnext', 'ls', 'listsame', 'lts', 'listtostring', 'lv',
+ 'listvalid', 'locate', 'match', 'method', 'nc', 'nconvert', 'normalize',
+ 'now', 'num', 'number', 'parameter', 'prefetchoff', 'prefetchon', 'property',
+ 'replace', 'sc', 'sconvert', 'sortbegin', 'sortend', 'wa', 'wascii', 'wc',
+ 'wchar', 'we', 'wextract', 'wf', 'wfind', 'wiswide', 'wl', 'wlength', 'wre',
+ 'wreverse', 'xecute'
+}, nil, true))
+
+-- Variables.
+local variable = token(l.VARIABLE, '$' * l.word_match({
+ -- Abbreviations.
+ 'ec', 'es', 'et', 'h', 'i', 'j', 'k', 'p', 'q', 's', 'st', 't', 'tl',
+ -- Full.
+ 'device', 'ecode', 'estack', 'etrap', 'halt', 'horolog', 'io', 'job',
+ 'namespace', 'principal', 'quit', 'roles', 'storage', 'stack', 'system',
+ 'test', 'this', 'tlevel', 'username', 'x', 'y',
+ -- Z variable abbreviations.
+ 'za', 'zb', 'zc', 'ze', 'zh', 'zi', 'zj', 'zm', 'zn', 'zo', 'zp', 'zr', 'zs',
+ 'zt', 'zts', 'ztz', 'zv',
+ -- Z variables.
+ 'zchild', 'zeof', 'zerror', 'zhorolog', 'zio', 'zjob', 'zmode', 'zname',
+ 'znspace', 'zorder', 'zparent', 'zpi', 'zpos', 'zreference', 'zstorage',
+ 'ztimestamp', 'ztimezone', 'ztrap', 'zversion',
+}, nil, true))
+
+-- Function entity.
+local entity = token(l.LABEL, l.starts_line(('%' + l.alpha) * l.alnum^0))
+
+-- Support functions.
+local support_function = '$$' * ('%' + l.alpha) * l.alnum^0 *
+ (('%' + l.alpha) * l.alnum^0)^-1
+
+-- Identifiers.
+local identifier = token(l.IDENTIFIER, l.alpha * l.alnum^0)
+
+-- Operators.
+local operator = token(l.OPERATOR, S('+-/*<>!=_@#&|?:\\\',()[]'))
+
+M._rules = {
+ {'whitespace', ws},
+ {'keyword', keyword},
+ {'variable', variable},
+ {'identifier', identifier},
+ {'string', string},
+ {'comment', comment},
+ {'number', number},
+ {'operator', operator},
+}
+
+M._foldsymbols = {
+ _patterns = {'%l+', '[{}]', '/%*', '%*/', '//'},
+ [l.PREPROCESSOR] = {['if'] = 1, ifdef = 1, ifndef = 1, endif = -1},
+ [l.OPERATOR] = {['{'] = 1, ['}'] = -1},
+ [l.COMMENT] = {['/*'] = 1, ['*/'] = -1, ['//'] = l.fold_line_comments('//')}
+}
+
+return M
diff --git a/lexlua/myrddin.lua b/lexlua/myrddin.lua
new file mode 100644
index 000000000..abe5b601a
--- /dev/null
+++ b/lexlua/myrddin.lua
@@ -0,0 +1,54 @@
+-- Copyright 2017-2018 Michael Forney. See License.txt
+-- Myrddin LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local lex = lexer.new('myrddin')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ break const continue elif else extern false for generic goto if impl in match
+ pkg pkglocal sizeof struct trait true type union use var while
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ void bool char byte int uint int8 uint8 int16 uint16 int32 uint32 int64 uint64
+ flt32 flt64
+]] + '@' * lexer.word))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = P{
+ V'part' * P'*/'^-1,
+ part = '/*' * (V'full' + (lexer.any - '/*' - '*/'))^0,
+ full = V'part' * '*/',
+}
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Numbers.
+local digit = lexer.digit + '_'
+local bdigit = R'01' + '_'
+local xdigit = lexer.xdigit + '_'
+local odigit = R'07' + '_'
+local integer = '0x' * xdigit^1 + '0o' * odigit^1 + '0b' * bdigit^1 + digit^1
+local float = digit^1 * (('.' * digit^1) * (S'eE' * S'+-'^-1 * digit^1)^-1 +
+ ('.' * digit^1)^-1 * S'eE' * S'+-'^-1 * digit^1)
+lex:add_rule('number', token(lexer.NUMBER, float + integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S'`#_+-/*%<>~!=^&|~:;,.()[]{}'))
+
+return lex
diff --git a/lexlua/nemerle.lua b/lexlua/nemerle.lua
new file mode 100644
index 000000000..9941c1a95
--- /dev/null
+++ b/lexlua/nemerle.lua
@@ -0,0 +1,66 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Nemerle LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('nemerle')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ _ abstract and array as base catch class def do else extends extern finally
+ foreach for fun if implements in interface internal lock macro match module
+ mutable namespace new out override params private protected public ref repeat
+ sealed static struct syntax this throw try type typeof unless until using
+ variant virtual when where while
+ -- Values.
+ null true false
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ bool byte char decimal double float int list long object sbyte short string
+ uint ulong ushort void
+]]))
+
+-- Strings.
+local sq_str = P('L')^-1 * lexer.delimited_range("'", true)
+local dq_str = P('L')^-1 * lexer.delimited_range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Preprocessor.
+local preproc_word = word_match[[
+ define elif else endif endregion error if ifdef ifndef line pragma region
+ undef using warning
+]]
+lex:add_rule('preproc', token(lexer.PREPROCESSOR, lexer.starts_line('#') *
+ S('\t ')^0 * preproc_word))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.PREPROCESSOR, 'region', 'endregion')
+lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/nim.lua b/lexlua/nim.lua
new file mode 100644
index 000000000..aea719a1b
--- /dev/null
+++ b/lexlua/nim.lua
@@ -0,0 +1,101 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Nim LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('nim', {fold_by_indentation = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ addr and as asm atomic bind block break case cast const continue converter
+ discard distinct div do elif else end enum except export finally for from
+ generic if import in include interface is isnot iterator lambda let macro
+ method mixin mod nil not notin object of or out proc ptr raise ref return
+ shared shl static template try tuple type var when while with without xor
+ yield
+]], true)))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match([[
+ -- Procs.
+ defined definedInScope new unsafeNew internalNew reset high low sizeof succ
+ pred inc dec newSeq len incl excl card ord chr ze ze64 toU8 toU16 toU32 abs
+ min max contains cmp setLen newString newStringOfCap add compileOption quit
+ shallowCopy del delete insert repr toFloat toBiggestFloat toInt toBiggestInt
+ addQuitProc substr zeroMem copyMem moveMem equalMem swap getRefcount clamp
+ isNil find contains pop each map GC_ref GC_unref echo debugEcho getTypeInfo
+  Open reopen Close EndOfFile readChar FlushFile readAll readFile writeFile
+ write readLine writeln getFileSize ReadBytes ReadChars readBuffer writeBytes
+ writeChars writeBuffer setFilePos getFilePos fileHandle cstringArrayToSeq
+ allocCStringArray deallocCStringArray atomicInc atomicDec compareAndSwap
+ setControlCHook writeStackTrace getStackTrace alloc alloc0 dealloc realloc
+ getFreeMem getTotalMem getOccupiedMem allocShared allocShared0 deallocShared
+ reallocShared IsOnStack GC_addCycleRoot GC_disable GC_enable GC_setStrategy
+ GC_enableMarkAndSweep GC_disableMarkAndSweep GC_fullCollect GC_getStatistics
+ nimDestroyRange getCurrentException getCurrentExceptionMsg onRaise likely
+ unlikely rawProc rawEnv finished slurp staticRead gorge staticExec rand
+  astToStr InstantiationInfo raiseAssert shallow compiles safeAdd locals
+ -- Iterators.
+ countdown countup items pairs fields fieldPairs lines
+ -- Templates.
+ accumulateResult newException CurrentSourcePath assert doAssert onFailedAssert
+ eval
+ -- Threads.
+ running joinThread joinThreads createThread threadId myThreadId
+ -- Channels.
+ send recv peek ready
+]], true)))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match([[
+ int int8 int16 int32 int64 uint uint8 uint16 uint32 uint64 float float32
+ float64 bool char string cstring pointer Ordinal auto any TSignedInt
+ TUnsignedInt TInteger TOrdinal TReal TNumber range array openarray varargs
+ seq set TSlice TThread TChannel
+ -- Meta Types.
+ expr stmt typeDesc void
+]], true)))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ on off isMainModule CompileDate CompileTime NimVersion NimMajor NimMinor
+ NimPatch cpuEndian hostOS hostCPU appType QuitSuccess QuitFailure inf neginf
+ nan
+]]))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", true)
+local dq_str = lexer.delimited_range('"', true)
+local triple_dq_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+local raw_dq_str = 'r' * lexer.delimited_range('"', false, true)
+lex:add_rule('string', token(lexer.STRING, triple_dq_str + sq_str + dq_str +
+ raw_dq_str))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline_esc^0))
+
+-- Numbers.
+local dec = lexer.digit^1 * ('_' * lexer.digit^1)^0
+local hex = '0' * S('xX') * lexer.xdigit^1 * ('_' * lexer.xdigit^1)^0
+local bin = '0' * S('bB') * S('01')^1 * ('_' * S('01')^1)^0
+local oct = '0o' * R('07')^1
+local integer = S('+-')^-1 * (bin + hex + oct + dec) *
+ ("'" * S('iIuUfF') * (P('8') + '16' + '32' + '64'))^-1
+local float = lexer.digit^1 * ('_' * lexer.digit^1)^0 *
+ ('.' * ('_' * lexer.digit)^0)^-1 * S('eE') * S('+-')^-1 *
+ lexer.digit^1 * ('_' * lexer.digit^1)^0
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR,
+ S('=+-*/<>@$~&%|!?^.:\\`()[]{},;')))
+
+return lex
diff --git a/lexlua/nsis.lua b/lexlua/nsis.lua
new file mode 100644
index 000000000..0387791bc
--- /dev/null
+++ b/lexlua/nsis.lua
@@ -0,0 +1,146 @@
+-- Copyright 2006-2018 Robert Gieseke. See License.txt.
+-- NSIS LPeg lexer.
+-- Based on NSIS 2.46 docs: http://nsis.sourceforge.net/Docs/.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('nsis')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments (4.1).
+local line_comment = (P(';') + '#') * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * '*/'
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"') +
+ lexer.delimited_range('`')))
+
+-- Constants (4.2.3).
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ $PROGRAMFILES $PROGRAMFILES32 $PROGRAMFILES64 $COMMONFILES $COMMONFILES32
+ $COMMONFILES64 $DESKTOP $EXEDIR $EXEFILE $EXEPATH ${NSISDIR} $WINDIR $SYSDIR
+  $TEMP $STARTMENU $SMPROGRAMS $SMSTARTUP $QUICKLAUNCH $DOCUMENTS $SENDTO $RECENT
+ $FAVORITES $MUSIC $PICTURES $VIDEOS $NETHOOD $FONTS $TEMPLATES $APPDATA
+ $LOCALAPPDATA $PRINTHOOD $INTERNET_CACHE $COOKIES $HISTORY $PROFILE
+ $ADMINTOOLS $RESOURCES $RESOURCES_LOCALIZED $CDBURN_AREA $HWNDPARENT
+ $PLUGINSDIR
+]]))
+-- TODO? Constants used in strings: $$ $\r $\n $\t
+
+-- Variables (4.2).
+lex:add_rule('variable', token(lexer.VARIABLE, word_match[[
+ $0 $1 $2 $3 $4 $5 $6 $7 $8 $9 $R0 $R1 $R2 $R3 $R4 $R5 $R6 $R7 $R8 $R9
+ $INSTDIR $OUTDIR $CMDLINE $LANGUAGE Var /GLOBAL
+]]) + '$' * lexer.word)
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ -- Pages (4.5).
+ Page UninstPage PageEx PageEnd PageExEnd
+ -- Section commands (4.6).
+ AddSize Section SectionEnd SectionIn SectionGroup SectionGroupEnd
+ -- Functions (4.7).
+ Function FunctionEnd
+ -- Callbacks (4.7.2).
+ .onGUIInit .onInit .onInstFailed .onInstSuccess .onGUIEnd .onMouseOverSection
+ .onRebootFailed .onSelChange .onUserAbort .onVerifyInstDir un.onGUIInit
+ un.onInit un.onUninstFailed un.onUninstSuccess un.onGUIEnd un.onRebootFailed
+ un.onSelChange un.onUserAbort
+ -- General Attributes (4.8.1).
+  AddBrandingImage AllowRootDirInstall AutoCloseWindow BGFont BGGradient
+ BrandingText /TRIMLEFT /TRIMRIGHT /TRIMCENTER Caption ChangeUI CheckBitmap
+ CompletedText ComponentText CRCCheck DetailsButtonText DirText DirVar
+ DirVerify FileErrorText Icon InstallButtonText InstallColors InstallDir
+ InstallDirRegKey InstProgressFlags InstType LicenseBkColor LicenseData
+ LicenseForceSelection LicenseText MiscButtonText Name OutFile
+ RequestExecutionLevel SetFont ShowInstDetails ShowUninstDetails SilentInstall
+ SilentUnInstall SpaceTexts SubCaption UninstallButtonText UninstallCaption
+ UninstallIcon UninstallSubCaption UninstallText WindowIcon XPStyle admin auto
+ bottom checkbox false force height hide highest leave left nevershow none
+ normal off on radiobuttons right show silent silentlog top true user width
+ -- Compiler Flags (4.8.2).
+ AllowSkipFiles FileBufSize SetCompress SetCompressor /SOLID /FINAL zlib bzip2
+ lzma SetCompressorDictSize SetDatablockOptimize SetDateSave SetOverwrite
+ ifnewer ifdiff lastused try
+ -- Version Information (4.8.3).
+ VIAddVersionKey VIProductVersion /LANG ProductName Comments CompanyName
+ LegalCopyright FileDescription FileVersion ProductVersion InternalName
+ LegalTrademarks OriginalFilename PrivateBuild SpecialBuild
+ -- Basic Instructions (4.9.1).
+  Delete /REBOOTOK Exec ExecShell ExecWait File /nonfatal Rename ReserveFile
+ RMDir SetOutPath
+ -- Registry INI File Instructions (4.9.2).
+ DeleteINISec DeleteINIStr DeleteRegKey /ifempty DeleteRegValue EnumRegKey
+ EnumRegValue ExpandEnvStrings FlushINI ReadEnvStr ReadINIStr ReadRegDWORD
+ ReadRegStr WriteINIStr WriteRegBin WriteRegDWORD WriteRegStr WriteRegExpandStr
+ HKCR HKEY_CLASSES_ROOT HKLM HKEY_LOCAL_MACHINE HKCU HKEY_CURRENT_USER HKU
+ HKEY_USERS HKCC HKEY_CURRENT_CONFIG HKDD HKEY_DYN_DATA HKPD
+ HKEY_PERFORMANCE_DATA SHCTX SHELL_CONTEXT
+ -- General Purpose Instructions (4.9.3).
+ CallInstDLL CopyFiles /SILENT /FILESONLY CreateDirectory CreateShortCut
+ GetDLLVersion GetDLLVersionLocal GetFileTime GetFileTimeLocal GetFullPathName
+ /SHORT GetTempFileName SearchPath SetFileAttributes RegDLL UnRegDLL
+ -- Flow Control Instructions (4.9.4).
+ Abort Call ClearErrors GetCurrentAddress GetFunctionAddress GetLabelAddress
+ Goto IfAbort IfErrors IfFileExists IfRebootFlag IfSilent IntCmp IntCmpU
+ MessageBox MB_OK MB_OKCANCEL MB_ABORTRETRYIGNORE MB_RETRYCANCEL MB_YESNO
+ MB_YESNOCANCEL MB_ICONEXCLAMATION MB_ICONINFORMATION MB_ICONQUESTION
+ MB_ICONSTOP MB_USERICON MB_TOPMOST MB_SETFOREGROUND MB_RIGHT MB_RTLREADING
+ MB_DEFBUTTON1 MB_DEFBUTTON2 MB_DEFBUTTON3 MB_DEFBUTTON4 IDABORT IDCANCEL
+ IDIGNORE IDNO IDOK IDRETRY IDYES Return Quit SetErrors StrCmp StrCmpS
+ -- File Instructions (4.9.5).
+ FileClose FileOpen FileRead FileReadByte FileSeek FileWrite FileWriteByte
+ FindClose FindFirst FindNext
+ -- Uninstaller Instructions (4.9.6).
+ WriteUninstaller
+ -- Miscellaneous Instructions (4.9.7).
+ GetErrorLevel GetInstDirError InitPluginsDir Nop SetErrorLevel SetRegView
+ SetShellVarContext all current Sleep
+ -- String Manipulation Instructions (4.9.8).
+ StrCpy StrLen
+ -- Stack Support (4.9.9).
+ Exch Pop Push
+ -- Integer Support (4.9.10).
+ IntFmt IntOp
+ -- Reboot Instructions (4.9.11).
+ Reboot SetRebootFlag
+ -- Install Logging Instructions (4.9.12).
+ LogSet LogText
+ -- Section Management (4.9.13).
+  SectionSetFlags SectionGetFlags SectionSetText SectionGetText
+ SectionSetInstTypes SectionGetInstTypes SectionSetSize SectionGetSize
+ SetCurInstType GetCurInstType InstTypeSetText InstTypeGetText
+ -- User Interface Instructions (4.9.14).
+ BringToFront CreateFont DetailPrint EnableWindow FindWindow GetDlgItem
+ HideWindow IsWindow LockWindow SendMessage SetAutoClose SetBrandingImage
+  SetDetailsView SetDetailsPrint listonly textonly both SetCtlColors /BRANDING
+ SetSilent ShowWindow
+ -- Multiple Languages Instructions (4.9.15).
+ LoadLanguageFile LangString LicenseLangString
+ -- Compile time commands (5).
+ !include !addincludedir !addplugindir !appendfile !cd !delfile !echo !error
+ !execute !packhdr !system !tempfile !warning !verbose {__FILE__} {__LINE__}
+ {__DATE__} {__TIME__} {__TIMESTAMP__} {NSIS_VERSION} !define !undef !ifdef
+ !ifndef !if !ifmacrodef !ifmacrondef !else !endif !insertmacro !macro
+ !macroend !searchparse !searchreplace
+]]))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/%|&^~!<>')))
+
+-- Labels (4.3).
+lex:add_rule('label', token(lexer.LABEL, lexer.word * ':'))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+return lex
diff --git a/lexlua/null.lua b/lexlua/null.lua
new file mode 100644
index 000000000..8854d0433
--- /dev/null
+++ b/lexlua/null.lua
@@ -0,0 +1,4 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Null LPeg lexer.
+
+return require('lexer').new('null')
diff --git a/lexlua/objective_c.lua b/lexlua/objective_c.lua
new file mode 100644
index 000000000..3d2f02ebb
--- /dev/null
+++ b/lexlua/objective_c.lua
@@ -0,0 +1,71 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Objective C LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('objective_c')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ -- From C.
+ asm auto break case const continue default do else extern false for goto if
+ inline register return sizeof static switch true typedef void volatile while
+ restrict _Bool _Complex _Pragma _Imaginary
+ -- Objective C.
+ oneway in out inout bycopy byref self super
+ -- Preprocessor directives.
+ @interface @implementation @protocol @end @private @protected @public @class
+ @selector @encode @defs @synchronized @try @throw @catch @finally
+ -- Constants.
+ TRUE FALSE YES NO NULL nil Nil METHOD_NULL
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ apply_t id Class MetaClass Object Protocol retval_t SEL STR IMP BOOL
+ TypedStream
+]]))
+
+-- Strings.
+local sq_str = P('L')^-1 * lexer.delimited_range("'", true)
+local dq_str = P('L')^-1 * lexer.delimited_range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Preprocessor.
+local preproc_word = word_match[[
+ define elif else endif error if ifdef ifndef import include line pragma undef
+ warning
+]]
+lex:add_rule('preprocessor', #lexer.starts_line('#') *
+ token(lexer.PREPROCESSOR, '#' * S('\t ')^0 *
+ preproc_word))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.PREPROCESSOR, 'region', 'endregion')
+lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif')
+lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/pascal.lua b/lexlua/pascal.lua
new file mode 100644
index 000000000..495175f4c
--- /dev/null
+++ b/lexlua/pascal.lua
@@ -0,0 +1,62 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Pascal LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('pascal')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ and array as at asm begin case class const constructor destructor
+ dispinterface div do downto else end except exports file final finalization
+ finally for function goto if implementation in inherited initialization inline
+ interface is label mod not object of on or out packed procedure program
+ property raise record repeat resourcestring set sealed shl shr static string
+ then threadvar to try type unit unsafe until uses var while with xor
+ absolute abstract assembler automated cdecl contains default deprecated dispid
+ dynamic export external far forward implements index library local message
+ name namespaces near nodefault overload override package pascal platform
+ private protected public published read readonly register reintroduce requires
+ resident safecall stdcall stored varargs virtual write writeln writeonly
+ false nil self true
+]], true)))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match([[
+ chr ord succ pred abs round trunc sqr sqrt arctan cos sin exp ln odd eof eoln
+]], true)))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match([[
+ shortint byte char smallint integer word longint cardinal boolean bytebool
+ wordbool longbool real single double extended comp currency pointer
+]], true)))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING,
+ S('uUrR')^-1 *
+ lexer.delimited_range("'", true, true)))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local bblock_comment = '{' * (lexer.any - '}')^0 * P('}')^-1
+local pblock_comment = '(*' * (lexer.any - '*)')^0 * P('*)')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + bblock_comment +
+ pblock_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ S('LlDdFf')^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('.,;^@:=<>+-/*()[]')))
+
+return lex
diff --git a/lexlua/perl.lua b/lexlua/perl.lua
new file mode 100644
index 000000000..2dfc70152
--- /dev/null
+++ b/lexlua/perl.lua
@@ -0,0 +1,142 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Perl LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local lex = lexer.new('perl')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ STDIN STDOUT STDERR BEGIN END CHECK INIT
+ require use
+ break continue do each else elsif foreach for if last local my next our
+ package return sub unless until while __FILE__ __LINE__ __PACKAGE__
+ and or not eq ne lt gt le ge
+]]))
+
+-- Markers.
+lex:add_rule('marker', token(lexer.COMMENT, word_match[[__DATA__ __END__]] *
+ lexer.any^0))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ abs accept alarm atan2 bind binmode bless caller chdir chmod chomp chop chown
+ chr chroot closedir close connect cos crypt dbmclose dbmopen defined delete
+ die dump each endgrent endhostent endnetent endprotoent endpwent endservent
+ eof eval exec exists exit exp fcntl fileno flock fork format formline getc
+ getgrent getgrgid getgrnam gethostbyaddr gethostbyname gethostent getlogin
+ getnetbyaddr getnetbyname getnetent getpeername getpgrp getppid getpriority
+ getprotobyname getprotobynumber getprotoent getpwent getpwnam getpwuid
+ getservbyname getservbyport getservent getsockname getsockopt glob gmtime goto
+ grep hex import index int ioctl join keys kill lcfirst lc length link listen
+ localtime log lstat map mkdir msgctl msgget msgrcv msgsnd new oct opendir open
+ ord pack pipe pop pos printf print prototype push quotemeta rand readdir read
+ readlink recv redo ref rename reset reverse rewinddir rindex rmdir scalar
+ seekdir seek select semctl semget semop send setgrent sethostent setnetent
+ setpgrp setpriority setprotoent setpwent setservent setsockopt shift shmctl
+ shmget shmread shmwrite shutdown sin sleep socket socketpair sort splice split
+ sprintf sqrt srand stat study substr symlink syscall sysread sysseek system
+ syswrite telldir tell tied tie time times truncate ucfirst uc umask undef
+ unlink unpack unshift untie utime values vec wait waitpid wantarray warn write
+]]))
+
+local delimiter_matches = {['('] = ')', ['['] = ']', ['{'] = '}', ['<'] = '>'}
+local literal_delimitted = P(function(input, index) -- for single delimiter sets
+ local delimiter = input:sub(index, index)
+ if not delimiter:find('%w') then -- only non alpha-numerics
+ local match_pos, patt
+ if delimiter_matches[delimiter] then
+ -- Handle nested delimiter/matches in strings.
+ local s, e = delimiter, delimiter_matches[delimiter]
+ patt = lexer.delimited_range(s..e, false, false, true)
+ else
+ patt = lexer.delimited_range(delimiter)
+ end
+ match_pos = lpeg.match(patt, input, index)
+ return match_pos or #input + 1
+ end
+end)
+local literal_delimitted2 = P(function(input, index) -- for 2 delimiter sets
+ local delimiter = input:sub(index, index)
+ -- Only consider non-alpha-numerics and non-spaces as delimiters. The
+ -- non-spaces are used to ignore operators like "-s".
+ if not delimiter:find('[%w ]') then
+    local first_match_pos, final_match_pos, patt
+ if delimiter_matches[delimiter] then
+ -- Handle nested delimiter/matches in strings.
+ local s, e = delimiter, delimiter_matches[delimiter]
+ patt = lexer.delimited_range(s..e, false, false, true)
+ else
+ patt = lexer.delimited_range(delimiter)
+ end
+ first_match_pos = lpeg.match(patt, input, index)
+ final_match_pos = lpeg.match(patt, input, first_match_pos - 1)
+ if not final_match_pos then -- using (), [], {}, or <> notation
+ final_match_pos = lpeg.match(lexer.space^0 * patt, input, first_match_pos)
+ end
+ return final_match_pos or #input + 1
+ end
+end)
+
+-- Strings.
+local sq_str = lexer.delimited_range("'")
+local dq_str = lexer.delimited_range('"')
+local cmd_str = lexer.delimited_range('`')
+local heredoc = '<<' * P(function(input, index)
+ local s, e, delimiter = input:find('([%a_][%w_]*)[\n\r\f;]+', index)
+ if s == index and delimiter then
+ local end_heredoc = '[\n\r\f]+'
+ local _, e = input:find(end_heredoc..delimiter, e)
+ return e and e + 1 or #input + 1
+ end
+end)
+local lit_str = 'q' * P('q')^-1 * literal_delimitted
+local lit_array = 'qw' * literal_delimitted
+local lit_cmd = 'qx' * literal_delimitted
+local lit_tr = (P('tr') + 'y') * literal_delimitted2 * S('cds')^0
+local regex_str = #P('/') * lexer.last_char_includes('-<>+*!~\\=%&|^?:;([{') *
+ lexer.delimited_range('/', true) * S('imosx')^0
+local lit_regex = 'qr' * literal_delimitted * S('imosx')^0
+local lit_match = 'm' * literal_delimitted * S('cgimosx')^0
+local lit_sub = 's' * literal_delimitted2 * S('ecgimosx')^0
+lex:add_rule('string',
+ token(lexer.STRING, sq_str + dq_str + cmd_str + heredoc + lit_str +
+ lit_array + lit_cmd + lit_tr) +
+ token(lexer.REGEX, regex_str + lit_regex + lit_match + lit_sub))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '#' * lexer.nonnewline_esc^0
+local block_comment = lexer.starts_line('=') * lexer.alpha *
+ (lexer.any - lexer.newline * '=cut')^0 *
+ (lexer.newline * '=cut')^-1
+lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Variables.
+local special_var = '$' * ('^' * S('ADEFHILMOPSTWX')^-1 +
+ S('\\"[]\'&`+*.,;=%~?@<>(|/!-') +
+ ':' * (lexer.any - ':') +
+ P('$') * -lexer.word +
+ lexer.digit^1)
+local plain_var = ('$#' + S('$@%')) * P('$')^0 * lexer.word + '$#'
+lex:add_rule('variable', token(lexer.VARIABLE, special_var + plain_var))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('-<>+*!~\\=/%&|^.?:;()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
diff --git a/lexlua/php.lua b/lexlua/php.lua
new file mode 100644
index 000000000..b989755c4
--- /dev/null
+++ b/lexlua/php.lua
@@ -0,0 +1,75 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- PHP LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local lex = lexer.new('php')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ and array as bool boolean break case cfunction class const continue declare
+ default die directory do double echo else elseif empty enddeclare endfor
+ endforeach endif endswitch endwhile eval exit extends false float for foreach
+ function global if include include_once int integer isset list new null object
+ old_function or parent print real require require_once resource return static
+ stdclass string switch true unset use var while xor
+ __class__ __file__ __function__ __line__ __sleep __wakeup
+]]))
+
+local word = (lexer.alpha + '_' + R('\127\255')) *
+ (lexer.alnum + '_' + R('\127\255'))^0
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, word))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, '$' * word))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'")
+local dq_str = lexer.delimited_range('"')
+local bt_str = lexer.delimited_range('`')
+local heredoc = '<<<' * P(function(input, index)
+ local _, e, delimiter = input:find('([%a_][%w_]*)[\n\r\f]+', index)
+ if delimiter then
+ local _, e = input:find('[\n\r\f]+'..delimiter, e)
+ return e and e + 1
+ end
+end)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + bt_str + heredoc))
+-- TODO: interpolated code.
+
+-- Comments.
+local line_comment = (P('//') + '#') * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('!@%^*&()-+=|/?.,;:<>[]{}')))
+
+-- Embedded in HTML.
+local html = lexer.load('html')
+
+-- Embedded PHP.
+local php_start_rule = token('php_tag', '<?' * ('php' * lexer.space)^-1)
+local php_end_rule = token('php_tag', '?>')
+html:embed(lex, php_start_rule, php_end_rule)
+lex:add_style('php_tag', lexer.STYLE_EMBEDDED)
+
+-- Fold points.
+lex:add_fold_point('php_tag', '<?', '?>')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+
+return lex
diff --git a/lexlua/pico8.lua b/lexlua/pico8.lua
new file mode 100644
index 000000000..9e70802c3
--- /dev/null
+++ b/lexlua/pico8.lua
@@ -0,0 +1,39 @@
+-- Copyright 2016-2018 Alejandro Baez (https://keybase.io/baez). See License.txt.
+-- PICO-8 lexer.
+-- http://www.lexaloffle.com/pico-8.php
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('pico8')
+
+-- Whitespace
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ __lua__ __gfx__ __gff__ __map__ __sfx__ __music__
+]]))
+
+-- Identifiers
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments
+lex:add_rule('comment', token(lexer.COMMENT, '//' * lexer.nonnewline_esc^0))
+
+-- Numbers
+lex:add_rule('number', token(lexer.NUMBER, lexer.integer))
+
+-- Operators
+lex:add_rule('operator', token(lexer.OPERATOR, S('_')))
+
+-- Embed Lua into PICO-8.
+local lua = lexer.load('lua')
+
+local lua_start_rule = token('pico8_tag', '__lua__')
+local lua_end_rule = token('pico8_tag', '__gfx__' )
+lex:embed(lua, lua_start_rule, lua_end_rule)
+lex:add_style('pico8_tag', lexer.STYLE_EMBEDDED)
+
+return lex
diff --git a/lexlua/pike.lua b/lexlua/pike.lua
new file mode 100644
index 000000000..77caed4c9
--- /dev/null
+++ b/lexlua/pike.lua
@@ -0,0 +1,56 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Pike LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('pike')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ break case catch continue default do else for foreach gauge if lambda return
+ sscanf switch while import inherit
+ -- Type modifiers.
+ constant extern final inline local nomask optional private protected public
+ static variant
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ array class float function int mapping mixed multiset object program string
+ void
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true) +
+ '#' * lexer.delimited_range('"')))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '//' * lexer.nonnewline_esc^0 +
+ lexer.nested_pair('/*', '*/')))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ S('lLdDfF')^-1))
+
+-- Preprocessors.
+lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, lexer.starts_line('#') *
+ lexer.nonnewline^0))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('<>=!+-/*%&|^~@`.,:;()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/pkgbuild.lua b/lexlua/pkgbuild.lua
new file mode 100644
index 000000000..bc5e9a672
--- /dev/null
+++ b/lexlua/pkgbuild.lua
@@ -0,0 +1,79 @@
+-- Copyright 2006-2018 gwash. See License.txt.
+-- Archlinux PKGBUILD LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('pkgbuild')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", false, true)
+local dq_str = lexer.delimited_range('"')
+local ex_str = lexer.delimited_range('`')
+local heredoc = '<<' * P(function(input, index)
+ local s, e, _, delimiter =
+ input:find('(["\']?)([%a_][%w_]*)%1[\n\r\f;]+', index)
+ if s == index and delimiter then
+ local _, e = input:find('[\n\r\f]+'..delimiter, e)
+ return e and e + 1 or #input + 1
+ end
+end)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ex_str + heredoc))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+  patch cd make mkdir cp sed install rm if then elif else fi case in esac
+ while for do done continue local return git svn co clone gconf-merge-schema
+ msg echo ln
+ -- Operators.
+ -a -b -c -d -e -f -g -h -k -p -r -s -t -u -w -x -O -G -L -S -N -nt -ot -ef -o
+ -z -n -eq -ne -lt -le -gt -ge -Np -i
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ build check package pkgver prepare
+]] * '()'))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+  -- We do *not* list pkgdir, srcdir, and startdir here.
+  -- These are defined by makepkg, but the user should not alter them.
+ arch backup changelog checkdepends conflicts depends epoch groups install
+ license makedepends md5sums noextract optdepends options pkgbase pkgdesc
+ pkgname pkgrel pkgver provides replaces sha1sums sha256sums sha384sums
+ sha512sums source url validpgpkeys
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE,
+ '$' * (S('!#?*@$') +
+ lexer.delimited_range('()', true, true) +
+ lexer.delimited_range('[]', true, true) +
+ lexer.delimited_range('{}', true, true) +
+ lexer.delimited_range('`', true, true) +
+ lexer.digit^1 +
+ lexer.word)))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^~.,:;?()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
diff --git a/lexlua/powershell.lua b/lexlua/powershell.lua
new file mode 100644
index 000000000..17bf988fc
--- /dev/null
+++ b/lexlua/powershell.lua
@@ -0,0 +1,63 @@
+-- Copyright 2015-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- PowerShell LPeg lexer.
+-- Contributed by Jeff Stone.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('powershell')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ Begin Break Continue Do Else End Exit For ForEach ForEach-Object Get-Date
+ Get-Random If Param Pause Powershell Process Read-Host Return Switch While
+ Write-Host
+]], true)))
+
+-- Comparison Operators.
+lex:add_rule('comparison', token(lexer.KEYWORD, '-' * word_match([[
+  and as band bor contains eq ge gt is isnot le like lt match ne notmatch not
+ notcontains notlike or replace
+]], true)))
+
+-- Parameters.
+lex:add_rule('parameter', token(lexer.KEYWORD, '-' * word_match([[
+ Confirm Debug ErrorAction ErrorVariable OutBuffer OutVariable Verbose WhatIf
+]], true)))
+
+-- Properties.
+lex:add_rule('property', token(lexer.KEYWORD, '.' * word_match([[
+ day dayofweek dayofyear hour millisecond minute month second timeofday year
+]], true)))
+
+-- Types.
+lex:add_rule('type', token(lexer.KEYWORD, '[' * word_match([[
+ array boolean byte char datetime decimal double hashtable int long single
+ string xml
+]], true) * ']'))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE,
+ '$' * (lexer.digit^1 + lexer.word +
+ lexer.delimited_range('{}', true, true))))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"', true)))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^&|~.,:;?()[]{}%`')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+
+return lex
diff --git a/lexlua/prolog.lua b/lexlua/prolog.lua
new file mode 100644
index 000000000..72a7d1927
--- /dev/null
+++ b/lexlua/prolog.lua
@@ -0,0 +1,129 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Prolog LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('prolog')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ -- Directives by manual scanning of SWI-Prolog source code
+ abolish arithmetic_function at_halt create_prolog_flag discontiguous dynamic
+ elif else endif format_predicate if initialization lazy_list_iterator listing
+ load_extensions meta_predicate mode module module_transparent multifile op
+ persistent pop_operators pred predicate_options prolog_load_context public
+ push_operators record redefine_system_predicate reexport set_prolog_flag
+ setting thread_local type use_foreign_library use_module volatile
+  -- Built-in predicates generated in SWI-Prolog via current_predicate/1.
+ abolish abort absolute_file_name access_file acyclic_term add_import_module
+ append apply arg assert asserta assertz at_end_of_stream at_halt atom
+ atom_chars atom_codes atom_concat atomic atomic_concat atomic_list_concat
+ atomics_to_string atom_length atom_number atom_prefix atom_string atom_to_term
+ attach_packs attvar autoload_path bagof between b_getval blob break b_set_dict
+ b_setval byte_count call callable call_cleanup call_continuation call_dcg
+ call_residue_vars call_shared_object_function call_with_depth_limit
+ call_with_inference_limit cancel_halt catch character_count char_code
+ char_conversion char_type clause clause_property close close_shared_object
+ code_type collation_key compare compile_aux_clauses compile_predicates
+ compiling compound compound_name_arguments compound_name_arity consult
+ context_module copy_predicate_clauses copy_stream_data copy_term copy_term_nat
+ create_prolog_flag current_arithmetic_function current_atom current_blob
+ current_char_conversion current_engine current_flag current_format_predicate
+ current_functor current_input current_key current_locale current_module
+ current_op current_output current_predicate current_prolog_flag
+ current_resource current_signal current_trie cwd cyclic_term date_time_stamp
+ dcg_translate_rule debugging default_module del_attr del_attrs del_dict
+ delete_directory delete_file delete_import_module deterministic dict_create
+ dict_pairs directory_files divmod downcase_atom duplicate_term dwim_match
+ dwim_predicate engine_create engine_destroy engine_fetch engine_next
+ engine_next_reified engine_post engine_self engine_yield ensure_loaded erase
+ exception exists_directory exists_file expand_answer expand_file_name
+ expand_file_search_path expand_goal expand_query expand_term export
+ extern_indirect fail false fast_read fast_term_serialized fast_write
+ file_base_name file_directory_name file_name_extension file_search_path
+ fill_buffer findall findnsols flag float flush_output forall format
+ format_predicate format_time freeze frozen functor garbage_collect
+ garbage_collect_atoms garbage_collect_clauses gc_file_search_cache get0 get
+ get_attr get_attrs get_byte get_char get_code get_dict getenv get_flag
+ get_single_char get_string_code get_time goal_expansion ground halt ignore
+ import import_module instance integer intern_indirect is_absolute_file_name
+ is_dict is_engine is_list is_stream is_thread keysort known_licenses leash
+ length library_directory license line_count line_position load_files
+ locale_create locale_destroy locale_property make_directory make_library_index
+ memberchk message_hook message_property message_queue_create
+ message_queue_destroy message_queue_property message_to_string module
+ module_property msort mutex_create mutex_destroy mutex_lock mutex_property
+ mutex_statistics mutex_trylock mutex_unlock mutex_unlock_all name nb_current
+ nb_delete nb_getval nb_linkarg nb_link_dict nb_linkval nb_setarg nb_set_dict
+ nb_setval nl nonvar noprofile noprotocol normalize_space nospy nospyall not
+ notrace nth_clause nth_integer_root_and_remainder number number_chars
+ number_codes number_string numbervars once on_signal op open open_null_stream
+ open_resource open_shared_object open_string open_xterm peek_byte peek_char
+ peek_code peek_string phrase plus portray predicate_option_mode
+ predicate_option_type predicate_property print print_message
+ print_message_lines print_toplevel_variables profiler prolog
+ prolog_choice_attribute prolog_current_choice prolog_current_frame
+ prolog_cut_to prolog_debug prolog_event_hook prolog_file_type
+ prolog_frame_attribute prolog_list_goal prolog_load_context prolog_load_file
+ prolog_nodebug prolog_skip_frame prolog_skip_level prolog_stack_property
+ prolog_to_os_filename prompt1 prompt protocol protocola protocolling put
+ put_attr put_attrs put_byte put_char put_code put_dict pwd qcompile
+ random_property rational read read_clause read_history read_link
+ read_pending_chars read_pending_codes read_string read_term
+ read_term_from_atom recorda recorded recordz redefine_system_predicate
+ reexport reload_library_index rename_file repeat require reset reset_profiler
+ residual_goals resource retract retractall same_file same_term see seeing seek
+ seen select_dict setarg set_end_of_stream setenv set_flag set_input set_locale
+ setlocale set_module setof set_output set_prolog_flag set_prolog_IO
+ set_prolog_stack set_random set_stream set_stream_position
+ setup_call_catcher_cleanup setup_call_cleanup shell shift size_file skip sleep
+ sort source_file source_file_property source_location split_string spy
+ stamp_date_time statistics stream_pair stream_position_data stream_property
+ string string_chars string_code string_codes string_concat string_length
+ string_lower string_upper strip_module style_check sub_atom sub_atom_icasechk
+ sub_string subsumes_term succ swiplrc tab tell telling term_attvars
+ term_expansion term_hash term_string term_to_atom term_variables
+ text_to_string thread_at_exit thread_create thread_detach thread_exit
+ thread_get_message thread_join thread_message_hook thread_peek_message
+ thread_property thread_self thread_send_message thread_setconcurrency
+ thread_signal thread_statistics throw time_file tmp_file tmp_file_stream told
+ trace tracing trie_destroy trie_gen trie_insert trie_insert_new trie_lookup
+ trie_new trie_property trie_term trim_stacks true ttyflush tty_get_capability
+ tty_goto tty_put tty_size unifiable unify_with_occurs_check unload_file
+ unsetenv upcase_atom use_module var variant_hash variant_sha1 var_number
+ var_property verbose_expansion version visible wait_for_input wildcard_match
+ with_mutex with_output_to working_directory write write_canonical write_length
+ writeln writeq write_term
+ -- Built-in functions generated in SWI-Prolog via current_arithmetic_function/1.
+ xor rem rdiv mod div abs acos acosh asin asinh atan2 atan atanh ceil ceiling
+ copysign cos cosh cputime e epsilon erf erfc eval exp float
+ float_fractional_part float_integer_part floor gcd getbit inf integer lgamma
+ log10 log lsb max min msb nan pi popcount powm random random_float rational
+ rationalize round sign sin sinh sqrt tan tanh truncate
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+local line_comment = '%' * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.digit^1 *
+ ('.' * lexer.digit^1)^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('-!+\\|=:;&<>()[]{}')))
+
+return lex
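The keyword rule above relies on `lexer.word_match`, which compiles a whitespace-separated word list into a single LPeg pattern that only accepts whole words. A stand-alone sketch of that behaviour, assuming only that LPeg and this patch's `lexer` module are on `package.path` (the word list is abbreviated for illustration):

local lexer = require('lexer')
local lpeg = require('lpeg')

-- Compile a small word set the same way the keyword rule above does.
local kw = lexer.word_match[[abolish assert retract]]

print(lpeg.match(kw, 'assert'))    -- whole word: prints 7, one past the match
print(lpeg.match(kw, 'assertion')) -- not in the set as a whole word: prints nil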
diff --git a/lexlua/props.lua b/lexlua/props.lua
new file mode 100644
index 000000000..634a54031
--- /dev/null
+++ b/lexlua/props.lua
@@ -0,0 +1,33 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Props LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('props', {lex_by_line = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Colors.
+lex:add_rule('color', token('color', '#' * lexer.xdigit * lexer.xdigit *
+ lexer.xdigit * lexer.xdigit *
+ lexer.xdigit * lexer.xdigit))
+lex:add_style('color', lexer.STYLE_NUMBER)
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Equals.
+lex:add_rule('equals', token(lexer.OPERATOR, '='))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, '$(' * (lexer.any - ')')^1 *
+ ')'))
+
+return lex
diff --git a/lexlua/protobuf.lua b/lexlua/protobuf.lua
new file mode 100644
index 000000000..5891955ea
--- /dev/null
+++ b/lexlua/protobuf.lua
@@ -0,0 +1,45 @@
+-- Copyright 2016-2018 David B. Lamkins <david@lamkins.net>. See License.txt.
+-- Protocol Buffer IDL LPeg lexer.
+-- <https://developers.google.com/protocol-buffers/>
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('protobuf')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ contained syntax import option package message group oneof optional required
+ repeated default extend extensions to max reserved service rpc returns
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ int32 int64 uint32 uint64 sint32 sint64 fixed32 fixed64 sfixed32 sfixed64
+ float double bool string bytes enum true false
+]]))
+
+-- Strings.
+local sq_str = P('L')^-1 * lexer.delimited_range("'", true)
+local dq_str = P('L')^-1 * lexer.delimited_range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('<>=|;,.()[]{}')))
+
+return lex
diff --git a/lexlua/ps.lua b/lexlua/ps.lua
new file mode 100644
index 000000000..a6a211dd7
--- /dev/null
+++ b/lexlua/ps.lua
@@ -0,0 +1,47 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Postscript LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('ps')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ pop exch dup copy roll clear count mark cleartomark counttomark exec if ifelse
+ for repeat loop exit stop stopped countexecstack execstack quit start
+ true false NULL
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+  add div idiv mod mul sub abs neg ceiling floor round truncate sqrt atan cos
+ sin exp ln log rand srand rrand
+]]))
+
+-- Identifiers.
+local word = (lexer.alpha + '-') * (lexer.alnum + '-')^0
+lex:add_rule('identifier', token(lexer.IDENTIFIER, word))
+
+-- Strings.
+local arrow_string = lexer.delimited_range('<>')
+local nested_string = lexer.delimited_range('()', false, false, true)
+lex:add_rule('string', token(lexer.STRING, arrow_string + nested_string))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '%' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Labels.
+lex:add_rule('label', token(lexer.LABEL, '/' * word))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('[]{}')))
+
+return lex
diff --git a/lexlua/ps.lua.orig b/lexlua/ps.lua.orig
new file mode 100644
index 000000000..c6a98faa9
--- /dev/null
+++ b/lexlua/ps.lua.orig
@@ -0,0 +1,167 @@
+-- Copyright 2017 Marcio Baraco <marciorps@gmail.com>. See LICENSE.
+-- Postscript LPeg lexer.
+
+local l = require('lexer')
+local token, word_match = l.token, l.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local M = {_NAME = 'ps'}
+
+-- Whitespace.
+local ws = token(l.WHITESPACE, l.space^1)
+
+-- Comments.
+local comment = token(l.COMMENT, '%' * l.nonnewline^0)
+
+-- Strings.
+local nested_string = l.delimited_range('()', false, false, true)
+local hex_string = P('<') * (l.xdigit + l.space)^0 * P('>')^-1
+local enc_string = P('<~') * (R('!u') + l.space)^0 * P('~>')
+local str = token(l.STRING, nested_string + hex_string + enc_string)
+
+-- Numbers.
+local frac = (P('.') * l.digit^1)
+local expo = (S('eE') * S('+-')^-1 * l.digit^1)
+local decm = S('+-')^-1 * l.digit ^ 1 * frac^-1 * expo^-1
+local radx = l.digit^-2 * '#' * l.alnum^1
+-- TODO: Accept only chars that fit the radix, e.g. [01] for 2#, hex for 16#.
+local number = token(l.NUMBER, decm + radx)
+
+-- PostScript allows almost all characters in names.
+local word = (l.graph - S('()<>[]{}/%'))^1
+-- Names.
+local identifier = token(l.IDENTIFIER, word)
+-- Deferred Names.
+local label = token(l.LABEL, '/' * word)
+-- Immediately Evaluated Names.
+local preproc = token(l.PREPROCESSOR, '//' * word)
+
+-- Object constructors.
+local operator = token(l.OPERATOR, S('[]{}=') + P('<<') + P('>>') + P('=='))
+
+-- Operators:
+-- + l.KEYWORD for basic ops
+-- + l.FUNCTION for graphic ops
+-- + l.CLASS for weird ps ops
+local keyword = token(l.KEYWORD, word_match{
+ -- Control operators.
+ 'exec', 'eexec', 'if', 'ifelse', 'for', 'repeat', 'loop', 'exit', 'stop',
+ 'stopped', 'countexecstack', 'execstack', 'quit', 'start',
+ -- Stack manipulation operators.
+ 'pop', 'exch', 'dup', 'copy', 'index', 'roll', 'clear', 'count', 'mark',
+ 'cleartomark', 'counttomark',
+  -- Array and string operators.
+ 'array', 'string', 'length', 'get', 'put', 'getinterval', 'putinterval',
+ 'aload', 'astore', 'packedarray', 'setpacking', 'currentpacking', 'forall',
+ 'anchorsearch', 'search', 'token',
+ -- Dictionary operators.
+ 'dict', 'maxlength', 'begin', 'end', 'def', 'undef', 'load', 'store', 'known',
+ 'where', 'currentdict', 'errordict', 'systemdict', 'userdict', 'globaldict',
+ 'shareddict', 'statusdict', 'countdictstack', 'cleardictstack', 'dictstack',
+ -- Type, attribute and conversion operators.
+ 'type', 'cvlit', 'cvx', 'cvi', 'cvn', 'cvrs', 'cvs', 'cvr', 'xcheck',
+ 'executeonly', 'noaccess', 'readonly', 'rcheck', 'wcheck',
+ -- Arithmetic and math operators.
+ 'add', 'div', 'idiv', 'mod', 'mul', 'sub', 'abs', 'neg', 'ceiling', 'floor',
+ 'round', 'truncate', 'sqrt', 'atan', 'cos', 'sin', 'exp', 'ln', 'log', 'rand',
+ 'srand', 'rrand',
+ -- Relational, boolean and bitwise operators.
+ 'eq', 'ne', 'ge', 'gt', 'le', 'lt', 'and', 'not', 'or', 'xor', 'true',
+ 'false', 'bitshift',
+ -- Coordinate system and matrix operators.
+ 'matrix', 'initmatrix', 'identmatrix', 'defaultmatrix', 'currentmatrix',
+ 'setmatrix', 'translate', 'scale', 'rotate', 'concat', 'concatmatrix',
+ 'transform', 'dtransform', 'itransform', 'idtransform', 'invertmatrix',
+})
+local func = token(l.FUNCTION, word_match{
+ -- Path construction operators.
+ 'newpath', 'currentpoint', 'moveto', 'rmoveto', 'lineto', 'rlineto', 'arc',
+ 'arcn', 'arct', 'arcto', 'curveto', 'rcurveto', 'closepath', 'flattenpath',
+ 'reversepath', 'strokepath', 'ustrokepath', 'charpath', 'uappend', 'clippath',
+ 'setbbox', 'pathbbox', 'pathforall', 'upath', 'ucache', 'initclip', 'clip',
+ 'eoclip', 'rectclip',
+ -- Glyph and font operators.
+ 'definefont', 'composefont', 'undefinefont', 'findfont', 'scalefont',
+ 'makefont', 'setfont', 'rootfont', 'currentfont', 'selectfont', 'show',
+ 'ashow', 'widthshow', 'awidthshow', 'xshow', 'yshow', 'xyshow', 'glyphshow',
+ 'stringwidth', 'cshow', 'kshow', 'findencoding', 'FontDirectory',
+ 'GlobalFontDirectory', 'SharedFontDirectory', 'StandardEncoding',
+ 'ISOLatin1Encoding', 'setcachedevice', 'setcachedevice2', 'setcharwidth',
+ -- CID Font operators.
+ 'addglyph', 'beginbfchar', 'beginbfrange', 'begincidchar', 'begincidrange',
+ 'begincmap', 'begincodespacerange', 'beginnotdefchar', 'beginnotdefrange',
+ 'beginrearrangedfont', 'beginusematrix', 'endbfchar', 'endbfrange',
+ 'endcidchar', 'endcidrange', 'endcmap', 'endcodespacerange', 'endnotdefchar',
+  'endnotdefrange', 'endrearrangedfont', 'endusematrix', 'removeall',
+ 'removeglyphs', 'StartData', 'usecmap', 'usefont',
+ -- Painting operations.
+ 'erasepage', 'stroke', 'fill', 'eofill', 'rectstroke', 'rectfill', 'ustroke',
+ 'ufill', 'ueofill', 'shfill', 'image', 'imagemask', 'colorimage',
+ -- Insideness testing operators.
+ 'infill', 'ineofill', 'inufill', 'inueofill', 'instroke', 'inustroke',
+ -- Form and pattern operators.
+ 'makepattern', 'setpattern', 'execform',
+ -- Graphics state operators.
+ 'gsave', 'grestore', 'clipsave', 'cliprestore', 'grestoreall', 'initgraphics',
+ 'gstate', 'setgstate', 'currentgstate', 'setlinewidth', 'currentlinewidth',
+ 'setlinecap', 'currentlinecap', 'setlinejoin', 'currentlinejoin',
+ 'setmiterlimit', 'currentmiterlimit', 'setstrokeadjust',
+ 'currentstrokeadjust', 'setdash', 'currentdash', 'setcolorspace',
+ 'currentcolorspace', 'setcolor', 'setgray', 'currentgray', 'sethsbcolor',
+ 'currenthsbcolor', 'setrgbcolor', 'currentrgbcolor', 'setcmykcolor',
+ 'currentcmykcolor', 'sethalftone', 'currenthalftone', 'setscreen',
+ 'currentscreen', 'setcolorscreen', 'currentcolorscreen', 'settransfer',
+ 'currenttransfer', 'setcolortransfer', 'currentcolortransfer',
+ 'setblackgeneration', 'currentblackgeneration', 'setundercolorremoval',
+ 'currentundercolorremoval', 'setcolorrendering', 'currentcolorrendering',
+ 'setflat', 'currentflat', 'setoverprint', 'currentoverprint', 'setsmoothness',
+ 'currentsmoothness', 'currentcolor',
+ -- Device setup operators.
+ 'showpage', 'copypage', 'setpagedevice', 'currentpagedevice', 'nulldevice',
+ 'currenttrapparams', 'settrapparams', 'settrapzone',
+})
+local misc = token(l.CLASS, word_match{
+ -- Miscellaneous operators
+ 'defineresource', 'undefineresource', 'findresource', 'findcolorrendering',
+ 'resourcestatus', 'resourceforall', 'GetHalftoneName', 'GetPageDeviceName',
+ 'GetSubstituteCRD', 'save', 'restore', 'setglobal', 'setshared',
+ 'currentglobal', 'gcheck', 'scheck', 'startjob', 'defineuserobject',
+ 'execuserobject', 'undefineuserobject', 'UserObjects', 'bind', 'null',
+ 'version', 'realtime', 'usertime', 'languagelevel', 'product', 'revision',
+ 'serialnumber', 'executive', 'echo', 'prompt', 'setsystemparams',
+ 'currentsystemparams', 'setuserparams', 'currentuserparams', 'setdevparams',
+ 'currentdevparams', 'vmreclaim', 'setvmthreshold', 'vmstatus', 'cachestatus',
+ 'setcachelimit', 'setcacheparams', 'currentcacheparams', 'setucacheparams',
+ 'ucachestatus', 'currentshared', 'exitserver', 'serverdict',
+ -- File operators
+ 'file', 'filter', 'closefile', 'read', 'write', 'readhexstring',
+ 'writehexstring', 'readstring', 'writestring', 'readline', 'bytesavailable',
+ 'flush', 'flushfile', 'resetfile', 'status', 'run', 'currentfile',
+ 'deletefile', 'renamefile', 'filenameforall', 'setfileposition',
+ 'fileposition', 'print', 'stack', 'pstack', 'printobject', 'writeobject',
+ 'setobjectformat', 'currentobjectformat',
+ -- Errors.
+ 'configurationerror', 'dictfull', 'dictstackoverflow', 'dictstackunderflow',
+ 'execstackoverflow', 'handleerror', 'interrupt', 'invalidaccess',
+ 'invalidexit', 'invalidfileaccess', 'invalidfont', 'invalidrestore',
+ 'ioerror', 'limitcheck', 'nocurrentpoint', 'rangecheck', 'stackoverflow',
+ 'stackunderflow', 'syntaxerror', 'timeout', 'typecheck', 'undefined',
+ 'undefinedfilename', 'undefinedresource', 'undefinedresult', 'unmatchedmark',
+ 'unregistered', 'VMerror',
+})
+
+M._rules = {
+ {'whitespace', ws},
+ {'comment', comment},
+ {'number', number},
+ {'preprocessor', preproc},
+ {'label', label},
+ {'keyword', keyword},
+ {'function', func},
+ {'class', misc},
+ {'operator', operator},
+ {'string', str},
+ {'identifier', identifier},
+}
+
+return M
diff --git a/lexlua/pure.lua b/lexlua/pure.lua
new file mode 100644
index 000000000..87f456eaf
--- /dev/null
+++ b/lexlua/pure.lua
@@ -0,0 +1,50 @@
+-- Copyright 2015-2018 David B. Lamkins <david@lamkins.net>. See License.txt.
+-- Pure LPeg lexer. See http://purelang.bitbucket.org/
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('pure')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Pragmas.
+local hashbang = lexer.starts_line('#!') * (lexer.nonnewline - '//')^0
+lex:add_rule('pragma', token(lexer.PREPROCESSOR, hashbang))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ namespace with end using interface extern let const def type public private
+ nonfix outfix infix infixl infixr prefix postfix if otherwise when case of
+ then else
+]]))
+
+-- Numbers.
+local bin = '0' * S('Bb') * S('01')^1
+local hex = '0' * S('Xx') * (R('09') + R('af') + R('AF'))^1
+local dec = R('09')^1
+local int = (bin + hex + dec) * P('L')^-1
+local rad = P('.') - '..'
+local exp = (S('Ee') * S('+-')^-1 * int)^-1
+local flt = int * (rad * dec)^-1 * exp + int^-1 * rad * dec * exp
+lex:add_rule('number', token(lexer.NUMBER, flt + int))
+
+-- Operators.
+local punct = S('+-/*%<>~!=^&|?~:;,.()[]{}@#$`\\\'')
+local dots = P('..')
+lex:add_rule('operator', token(lexer.OPERATOR, dots + punct))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"', true)))
+
+return lex
diff --git a/lexlua/python.lua b/lexlua/python.lua
new file mode 100644
index 000000000..c3e76ac7e
--- /dev/null
+++ b/lexlua/python.lua
@@ -0,0 +1,104 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Python LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('python', {fold_by_indentation = true})
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ and as assert break class continue def del elif else except exec finally for
+ from global if import in is lambda nonlocal not or pass print raise return try
+ while with yield
+ -- Descriptors/attr access.
+ __get__ __set__ __delete__ __slots__
+ -- Class.
+ __new__ __init__ __del__ __repr__ __str__ __cmp__ __index__ __lt__ __le__
+ __gt__ __ge__ __eq__ __ne__ __hash__ __nonzero__ __getattr__ __getattribute__
+ __setattr__ __delattr__ __call__
+ -- Operator.
+ __add__ __sub__ __mul__ __div__ __floordiv__ __mod__ __divmod__ __pow__
+ __and__ __xor__ __or__ __lshift__ __rshift__ __nonzero__ __neg__ __pos__
+ __abs__ __invert__ __iadd__ __isub__ __imul__ __idiv__ __ifloordiv__ __imod__
+ __ipow__ __iand__ __ixor__ __ior__ __ilshift__ __irshift__
+ -- Conversions.
+ __int__ __long__ __float__ __complex__ __oct__ __hex__ __coerce__
+ -- Containers.
+ __len__ __getitem__ __missing__ __setitem__ __delitem__ __contains__ __iter__
+ __getslice__ __setslice__ __delslice__
+ -- Module and class attribs.
+ __doc__ __name__ __dict__ __file__ __path__ __module__ __bases__ __class__
+ __self__
+ -- Stdlib/sys.
+ __builtin__ __future__ __main__ __import__ __stdin__ __stdout__ __stderr__
+ -- Other.
+ __debug__ __doc__ __import__ __name__
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ abs all any apply basestring bool buffer callable chr classmethod cmp coerce
+ compile complex copyright credits delattr dict dir divmod enumerate eval
+ execfile exit file filter float frozenset getattr globals hasattr hash help
+ hex id input int intern isinstance issubclass iter len license list locals
+ long map max min object oct open ord pow property quit range raw_input reduce
+ reload repr reversed round set setattr slice sorted staticmethod str sum super
+ tuple type unichr unicode vars xrange zip
+]]))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ ArithmeticError AssertionError AttributeError BaseException DeprecationWarning
+ EOFError Ellipsis EnvironmentError Exception False FloatingPointError
+ FutureWarning GeneratorExit IOError ImportError ImportWarning IndentationError
+ IndexError KeyError KeyboardInterrupt LookupError MemoryError NameError None
+ NotImplemented NotImplementedError OSError OverflowError
+ PendingDeprecationWarning ReferenceError RuntimeError RuntimeWarning
+ StandardError StopIteration SyntaxError SyntaxWarning SystemError SystemExit
+ TabError True TypeError UnboundLocalError UnicodeDecodeError
+ UnicodeEncodeError UnicodeError UnicodeTranslateError UnicodeWarning
+ UserWarning ValueError Warning ZeroDivisionError
+]]))
+
+-- Self.
+lex:add_rule('self', token('self', P('self')))
+lex:add_style('self', lexer.STYLE_TYPE)
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline_esc^0))
+
+-- Strings.
+local sq_str = P('u')^-1 * lexer.delimited_range("'", true)
+local dq_str = P('U')^-1 * lexer.delimited_range('"', true)
+local triple_sq_str = "'''" * (lexer.any - "'''")^0 * P("'''")^-1
+local triple_dq_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+-- TODO: raw_strs cannot end in single \.
+local raw_sq_str = P('u')^-1 * 'r' * lexer.delimited_range("'", false, true)
+local raw_dq_str = P('U')^-1 * 'R' * lexer.delimited_range('"', false, true)
+lex:add_rule('string', token(lexer.STRING, triple_sq_str + triple_dq_str +
+ sq_str + dq_str + raw_sq_str +
+ raw_dq_str))
+
+-- Numbers.
+local dec = lexer.digit^1 * S('Ll')^-1
+local bin = '0b' * S('01')^1 * ('_' * S('01')^1)^0
+local oct = '0' * R('07')^1 * S('Ll')^-1
+local integer = S('+-')^-1 * (bin + lexer.hex_num + oct + dec)
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer))
+
+-- Decorators.
+lex:add_rule('decorator', token('decorator', '@' * lexer.nonnewline^0))
+lex:add_style('decorator', lexer.STYLE_PREPROCESSOR)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~`')))
+
+return lex
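The triple-quoted string patterns above use a common Scintillua idiom: consume anything that is not the terminator, then make the terminator itself optional so an unterminated literal at end-of-buffer still lexes as a single string token. A pure-LPeg sketch of just that idiom (illustrative only, not part of the patch):

local lpeg = require('lpeg')
local P = lpeg.P

-- Same shape as triple_dq_str above; the closing delimiter is optional.
local triple = P('"""') * (P(1) - P('"""'))^0 * P('"""')^-1

print(lpeg.match(triple, '"""closed"""')) -- consumes through the terminator
print(lpeg.match(triple, '"""oops'))      -- still matches, to end of input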
diff --git a/lexlua/rails.lua b/lexlua/rails.lua
new file mode 100644
index 000000000..8d324acd7
--- /dev/null
+++ b/lexlua/rails.lua
@@ -0,0 +1,54 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Ruby on Rails LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('rails', {inherit = lexer.load('ruby')})
+
+-- Whitespace
+lex:modify_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Functions.
+
+-- ActionPack.
+lex:modify_rule('function', token(lexer.FUNCTION, word_match[[
+ before_filter skip_before_filter skip_after_filter after_filter around_filter
+ filter filter_parameter_logging layout require_dependency render render_action
+ render_text render_file render_template render_nothing render_component
+ render_without_layout rescue_from url_for redirect_to redirect_to_path
+ redirect_to_url respond_to helper helper_method model service observer
+ serialize scaffold verify hide_action
+]]) +
+
+-- View helpers.
+token(lexer.FUNCTION, word_match[[
+ check_box content_for error_messages_for form_for fields_for file_field
+ hidden_field image_submit_tag label link_to password_field radio_button submit
+ text_field text_area
+]]) +
+
+-- ActiveRecord
+token(lexer.FUNCTION, word_match[[
+ after_create after_destroy after_save after_update after_validation
+ after_validation_on_create after_validation_on_update before_create
+ before_destroy before_save before_update before_validation
+ before_validation_on_create before_validation_on_update composed_of belongs_to
+ has_one has_many has_and_belongs_to_many validate validates validate_on_create
+ validates_numericality_of validate_on_update validates_acceptance_of
+ validates_associated validates_confirmation_of validates_each
+ validates_format_of validates_inclusion_of validates_exclusion_of
+ validates_length_of validates_presence_of validates_size_of
+ validates_uniqueness_of
+ attr_protected attr_accessible attr_readonly accepts_nested_attributes_for
+ default_scope scope
+]]) +
+
+-- ActiveSupport
+token(lexer.FUNCTION, word_match[[
+ alias_method_chain alias_attribute delegate cattr_accessor mattr_accessor
+ returning memoize
+]]) + lex:get_rule('function'))
+
+return lex
diff --git a/lexlua/rc.lua b/lexlua/rc.lua
new file mode 100644
index 000000000..868ddbc49
--- /dev/null
+++ b/lexlua/rc.lua
@@ -0,0 +1,54 @@
+-- Copyright 2017-2018 Michael Forney. See License.txt.
+-- rc LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('rc')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ for in while if not switch fn builtin cd eval exec exit flag rfork shift
+ ulimit umask wait whatis . ~
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local str = lexer.delimited_range("'", false, true)
+local heredoc = '<<' * P(function(input, index)
+ local s, e, _, delimiter = input:find('[ \t]*(["\']?)([%w!"%%+,-./:?@_~]+)%1',
+ index)
+ if s == index and delimiter then
+ delimiter = delimiter:gsub('[%%+-.?]', '%%%1')
+ local _, e = input:find('[\n\r]'..delimiter..'[\n\r]', e)
+ return e and e + 1 or #input + 1
+ end
+end)
+lex:add_rule('string', token(lexer.STRING, str + heredoc))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.integer + lexer.float))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, '$' * S('"#')^-1 *
+ ('*' + lexer.digit^1 +
+ lexer.word)))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('@`=!<>*&^|;?()[]{}') +
+ '\\\n'))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
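The heredoc rule above wraps a plain Lua function in `lpeg.P`: LPeg calls the function with the whole subject and the current position, the function scans ahead with `string.find`, and the number it returns becomes the end of the match. A minimal stand-alone sketch of that technique (the blank-line terminator here is illustrative, not rc syntax):

local lpeg = require('lpeg')

-- Consume everything up to and including the next blank line, or to the end
-- of the subject if there is none (the same shape as the heredoc scanner).
local to_blank = lpeg.P(function(input, index)
  local s = input:find('\n\n', index, true)
  return s and s + 2 or #input + 1
end)

print(lpeg.match(lpeg.P('X') * to_blank, 'Xone\ntwo\n\nrest'))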
diff --git a/lexlua/rebol.lua b/lexlua/rebol.lua
new file mode 100644
index 000000000..a6fc68e93
--- /dev/null
+++ b/lexlua/rebol.lua
@@ -0,0 +1,98 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Rebol LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('rebol')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+local line_comment = ';' * lexer.nonnewline^0
+local block_comment = 'comment' * P(' ')^-1 *
+ lexer.delimited_range('{}', false, true)
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abs absolute add and~ at back change clear complement copy cp divide fifth
+ find first fourth head insert last make max maximum min minimum multiply
+ negate next or~ pick poke power random remainder remove second select skip
+ sort subtract tail third to trim xor~
+ alias all any arccosine arcsine arctangent bind break browse call
+ caret-to-offset catch checksum close comment compose compress cosine debase
+ decompress dehex detab dh-compute-key dh-generate-key dh-make-key difference
+ disarm do dsa-generate-key dsa-make-key dsa-make-signature
+ dsa-verify-signature either else enbase entab exclude exit exp foreach form
+ free get get-modes halt hide if in intersect load log-10 log-2 log-e loop
+ lowercase maximum-of minimum-of mold not now offset-to-caret open parse prin
+ print protect q query quit read read-io recycle reduce repeat return reverse
+ rsa-encrypt rsa-generate-key rsa-make-key save secure set set-modes show sine
+ size-text square-root tangent textinfo throw to-hex to-local-file
+ to-rebol-file trace try union unique unprotect unset until update uppercase
+ use wait while write write-io
+ basic-syntax-header crlf font-fixed font-sans-serif font-serif list-words
+ outstr val value
+ about alert alter append array ask boot-prefs build-tag center-face change-dir
+ charset choose clean-path clear-fields confine confirm context cvs-date
+ cvs-version decode-cgi decode-url deflag-face delete demo desktop dirize
+ dispatch do-boot do-events do-face do-face-alt does dump-face dump-pane echo
+ editor emailer emit extract find-by-type find-key-face find-window flag-face
+ flash focus for forall forever forskip func function get-net-info get-style
+ has help hide-popup import-email inform input insert-event-func join launch
+ launch-thru layout license list-dir load-image load-prefs load-thru make-dir
+ make-face net-error open-events parse-email-addrs parse-header
+ parse-header-date parse-xml path-thru probe protect-system read-net read-thru
+ reboot reform rejoin remold remove-event-func rename repend replace request
+ request-color request-date request-download request-file request-list
+ request-pass request-text resend save-prefs save-user scroll-para send
+ set-font set-net set-para set-style set-user set-user-name show-popup source
+ split-path stylize switch throw-on-error to-binary to-bitset to-block to-char
+ to-date to-decimal to-email to-event to-file to-get-word to-hash to-idate
+ to-image to-integer to-issue to-list to-lit-path to-lit-word to-logic to-money
+ to-none to-pair to-paren to-path to-refinement to-set-path to-set-word
+ to-string to-tag to-time to-tuple to-url to-word unfocus uninstall unview
+ upgrade Usage vbug view view-install view-prefs what what-dir write-user
+ return at space pad across below origin guide tabs indent style styles size
+ sense backcolor do none
+ action? any-block? any-function? any-string? any-type? any-word? binary?
+ bitset? block? char? datatype? date? decimal? email? empty? equal? error?
+ even? event? file? function? get-word? greater-or-equal? greater? hash? head?
+ image? index? integer? issue? length? lesser-or-equal? lesser? library? list?
+ lit-path? lit-word? logic? money? native? negative? none? not-equal? number?
+ object? odd? op? pair? paren? path? port? positive? refinement? routine? same?
+ series? set-path? set-word? strict-equal? strict-not-equal? string? struct?
+ tag? tail? time? tuple? unset? url? word? zero? connected? crypt-strength?
+ exists-key? input? script? type? value? ? ?? dir? exists-thru? exists?
+ flag-face? found? in-window? info? inside? link-app? link? modified? offset?
+ outside? screen-offset? size? span? view? viewed? win-offset? within?
+ action! any-block! any-function! any-string! any-type! any-word! binary!
+ bitset! block! char! datatype! date! decimal! email! error! event! file!
+ function! get-word! hash! image! integer! issue! library! list! lit-path!
+ lit-word! logic! money! native! none! number! object! op! pair! paren! path!
+ port! refinement! routine! series! set-path! set-word! string! struct! symbol!
+ tag! time! tuple! unset! url! word!
+ true false self
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '-') *
+ (lexer.alnum + '-')^0))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"', true) +
+ lexer.delimited_range('{}') +
+ "'" * lexer.word))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+/*:()[]')))
+
+-- Fold points.
+lex:add_fold_point(lexer.COMMENT, '{', '}')
+lex:add_fold_point(lexer.COMMENT, ';', lexer.fold_line_comments(';'))
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+
+return lex
diff --git a/lexlua/rest.lua b/lexlua/rest.lua
new file mode 100644
index 000000000..b1af7c562
--- /dev/null
+++ b/lexlua/rest.lua
@@ -0,0 +1,259 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- reStructuredText LPeg lexer.
+
+local l = require('lexer')
+local token, word_match, starts_line = l.token, l.word_match, l.starts_line
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local M = {_NAME = 'rest'}
+
+-- Whitespace.
+local ws = token(l.WHITESPACE, S(' \t')^1 + l.newline^1)
+local any_indent = S(' \t')^0
+
+-- Section titles (2 or more characters).
+local adornment_chars = lpeg.C(S('!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~'))
+local adornment = lpeg.C(adornment_chars^2 * any_indent) * (l.newline + -1)
+local overline = lpeg.Cmt(starts_line(adornment), function(input, index, adm, c)
+ if not adm:find('^%'..c..'+%s*$') then return nil end
+ local rest = input:sub(index)
+ local lines = 1
+ for line, e in rest:gmatch('([^\r\n]+)()') do
+ if lines > 1 and line:match('^(%'..c..'+)%s*$') == adm then
+ return index + e - 1
+ end
+ if lines > 3 or #line > #adm then return nil end
+ lines = lines + 1
+ end
+ return #input + 1
+end)
+local underline = lpeg.Cmt(starts_line(adornment), function(_, index, adm, c)
+ local pos = adm:match('^%'..c..'+()%s*$')
+ return pos and index - #adm + pos - 1 or nil
+end)
+-- Token needs to be a predefined one in order for the folder to work.
+local title = token(l.CONSTANT, overline + underline)
+
+-- Lists.
+local bullet_list = S('*+-') -- TODO: '•‣⁃', as lpeg does not support UTF-8
+local enum_list = P('(')^-1 *
+ (l.digit^1 + S('ivxlcmIVXLCM')^1 + l.alnum + '#') * S('.)')
+local field_list = ':' * (l.any - ':')^1 * P(':')^-1
+local option_word = l.alnum * (l.alnum + '-')^0
+local option = S('-/') * option_word * (' ' * option_word)^-1 +
+ '--' * option_word * ('=' * option_word)^-1
+local option_list = option * (',' * l.space^1 * option)^-1
+local list = #(l.space^0 * (S('*+-:/') + enum_list)) *
+ starts_line(token('list', l.space^0 * (option_list + bullet_list +
+ enum_list + field_list) *
+ l.space))
+
+-- Literal block.
+local block = P('::') * (l.newline + -1) * function(input, index)
+ local rest = input:sub(index)
+ local level, quote = #rest:match('^([ \t]*)')
+ for pos, indent, line in rest:gmatch('()[ \t]*()([^\r\n]+)') do
+ local no_indent = (indent - pos < level and line ~= ' ' or level == 0)
+ local quoted = no_indent and line:find(quote or '^%s*%W')
+ if quoted and not quote then quote = '^%s*%'..line:match('^%s*(%W)') end
+ if no_indent and not quoted and pos > 1 then return index + pos - 1 end
+ end
+ return #input + 1
+end
+local literal_block = token('literal_block', block)
+
+-- Line block.
+local line_block_char = token(l.OPERATOR, starts_line(any_indent * '|'))
+
+local word = l.alpha * (l.alnum + S('-.+'))^0
+
+-- Explicit markup blocks.
+local prefix = any_indent * '.. '
+local footnote_label = '[' * (l.digit^1 + '#' * word^-1 + '*') * ']'
+local footnote = token('footnote_block', prefix * footnote_label * l.space)
+local citation_label = '[' * word * ']'
+local citation = token('citation_block', prefix * citation_label * l.space)
+local link = token('link_block', prefix * '_' *
+ (l.delimited_range('`') + (P('\\') * 1 +
+ l.nonnewline - ':')^1) * ':' * l.space)
+local markup_block = #prefix * starts_line(footnote + citation + link)
+
+-- Directives.
+local directive_type = word_match({
+ -- Admonitions
+ 'attention', 'caution', 'danger', 'error', 'hint', 'important', 'note', 'tip',
+ 'warning', 'admonition',
+ -- Images
+ 'image', 'figure',
+ -- Body elements
+ 'topic', 'sidebar', 'line-block', 'parsed-literal', 'code', 'math', 'rubric',
+ 'epigraph', 'highlights', 'pull-quote', 'compound', 'container',
+ -- Table
+ 'table', 'csv-table', 'list-table',
+ -- Document parts
+ 'contents', 'sectnum', 'section-autonumbering', 'header', 'footer',
+ -- References
+ 'target-notes', 'footnotes', 'citations',
+ -- HTML-specific
+ 'meta',
+ -- Directives for substitution definitions
+ 'replace', 'unicode', 'date',
+ -- Miscellaneous
+ 'include', 'raw', 'class', 'role', 'default-role', 'title',
+ 'restructuredtext-test-directive',
+}, '-')
+local known_directive = token('directive',
+ prefix * directive_type * '::' * l.space)
+local sphinx_directive_type = word_match({
+ -- The TOC tree.
+ 'toctree',
+ -- Paragraph-level markup.
+ 'note', 'warning', 'versionadded', 'versionchanged', 'deprecated', 'seealso',
+ 'rubric', 'centered', 'hlist', 'glossary', 'productionlist',
+ -- Showing code examples.
+ 'highlight', 'literalinclude',
+ -- Miscellaneous
+ 'sectionauthor', 'index', 'only', 'tabularcolumns'
+}, '-')
+local sphinx_directive = token('sphinx_directive',
+ prefix * sphinx_directive_type * '::' * l.space)
+local unknown_directive = token('unknown_directive',
+ prefix * word * '::' * l.space)
+local directive = #prefix * starts_line(known_directive + sphinx_directive +
+ unknown_directive)
+
+-- Sphinx code block.
+local indented_block = function(input, index)
+ local rest = input:sub(index)
+ local level = #rest:match('^([ \t]*)')
+ for pos, indent, line in rest:gmatch('()[ \t]*()([^\r\n]+)') do
+ if indent - pos < level and line ~= ' ' or level == 0 and pos > 1 then
+ return index + pos - 1
+ end
+ end
+ return #input + 1
+end
+local code_block = prefix * 'code-block::' * S(' \t')^1 * l.nonnewline^0 *
+ (l.newline + -1) * indented_block
+local sphinx_block = #prefix * token('code_block', starts_line(code_block))
+
+-- Substitution definitions.
+local substitution = #prefix *
+ token('substitution',
+ starts_line(prefix * l.delimited_range('|') *
+ l.space^1 * word * '::' * l.space))
+
+-- Comments.
+local line_comment = prefix * l.nonnewline^0
+local bprefix = any_indent * '..'
+local block_comment = bprefix * l.newline * indented_block
+local comment = #bprefix *
+ token(l.COMMENT, starts_line(line_comment + block_comment))
+
+-- Inline markup.
+local em = token('em', l.delimited_range('*'))
+local strong = token('strong', ('**' * (l.any - '**')^0 * P('**')^-1))
+local role = token('role', ':' * word * ':' * (word * ':')^-1)
+local interpreted = role^-1 * token('interpreted', l.delimited_range('`')) *
+ role^-1
+local inline_literal = token('inline_literal',
+ '``' * (l.any - '``')^0 * P('``')^-1)
+local link_ref = token('link',
+ (word + l.delimited_range('`')) * '_' * P('_')^-1 +
+ '_' * l.delimited_range('`'))
+local footnote_ref = token('footnote', footnote_label * '_')
+local citation_ref = token('citation', citation_label * '_')
+local substitution_ref = token('substitution', l.delimited_range('|', true) *
+ ('_' * P('_')^-1)^-1)
+local link = token('link', l.alpha * (l.alnum + S('-.'))^1 * ':' *
+ (l.alnum + S('/.+-%@'))^1)
+local inline_markup = (strong + em + inline_literal + link_ref + interpreted +
+ footnote_ref + citation_ref + substitution_ref + link) *
+ -l.alnum
+
+-- Other.
+local non_space = token(l.DEFAULT, l.alnum * (l.any - l.space)^0)
+local escape = token(l.DEFAULT, '\\' * l.any)
+
+M._rules = {
+ {'literal_block', literal_block},
+ {'list', list},
+ {'markup_block', markup_block},
+ {'code_block', sphinx_block},
+ {'directive', directive},
+ {'substitution', substitution},
+ {'comment', comment},
+ {'title', title},
+ {'line_block_char', line_block_char},
+ {'whitespace', ws},
+ {'inline_markup', inline_markup},
+ {'non_space', non_space},
+ {'escape', escape}
+}
+
+M._tokenstyles = {
+ list = l.STYLE_TYPE,
+ literal_block = l.STYLE_EMBEDDED..',eolfilled',
+ footnote_block = l.STYLE_LABEL,
+ citation_block = l.STYLE_LABEL,
+ link_block = l.STYLE_LABEL,
+ directive = l.STYLE_KEYWORD,
+ sphinx_directive = l.STYLE_KEYWORD..',bold',
+ unknown_directive = l.STYLE_KEYWORD..',italics',
+ code_block = l.STYLE_EMBEDDED..',eolfilled',
+ substitution = l.STYLE_VARIABLE,
+ strong = 'bold',
+ em = 'italics',
+ role = l.STYLE_CLASS,
+ interpreted = l.STYLE_STRING,
+ inline_literal = l.STYLE_EMBEDDED,
+ link = 'underlined',
+ footnote = 'underlined',
+ citation = 'underlined',
+}
+
+local sphinx_levels = {
+ ['#'] = 0, ['*'] = 1, ['='] = 2, ['-'] = 3, ['^'] = 4, ['"'] = 5
+}
+
+-- Section-based folding.
+M._fold = function(text, start_pos, start_line, start_level)
+ local folds, line_starts = {}, {}
+ for pos in (text..'\n'):gmatch('().-\r?\n') do
+ line_starts[#line_starts + 1] = pos
+ end
+ local style_at, CONSTANT, level = l.style_at, l.CONSTANT, start_level
+ local sphinx = l.property_int['fold.by.sphinx.convention'] > 0
+ local FOLD_BASE = l.FOLD_BASE
+ local FOLD_HEADER, FOLD_BLANK = l.FOLD_HEADER, l.FOLD_BLANK
+ for i = 1, #line_starts do
+ local pos, next_pos = line_starts[i], line_starts[i + 1]
+ local c = text:sub(pos, pos)
+ local line_num = start_line + i - 1
+ folds[line_num] = level
+ if style_at[start_pos + pos] == CONSTANT and c:find('^[^%w%s]') then
+ local sphinx_level = FOLD_BASE + (sphinx_levels[c] or #sphinx_levels)
+ level = not sphinx and level - 1 or sphinx_level
+ if level < FOLD_BASE then level = FOLD_BASE end
+ folds[line_num - 1], folds[line_num] = level, level + FOLD_HEADER
+ level = (not sphinx and level or sphinx_level) + 1
+ elseif c == '\r' or c == '\n' then
+ folds[line_num] = level + FOLD_BLANK
+ end
+ end
+ return folds
+end
+
+l.property['fold.by.sphinx.convention'] = '0'
+
+--[[ Embedded languages.
+local bash = l.load('bash')
+local bash_indent_level
+local start_rule = #(prefix * 'code-block' * '::' * l.space^1 * 'bash' *
+ (l.newline + -1)) * sphinx_directive *
+ token('bash_begin', P(function(input, index)
+ bash_indent_level = #input:match('^([ \t]*)', index)
+ return index
+ end))]]
+
+return M
diff --git a/lexlua/rexx.lua b/lexlua/rexx.lua
new file mode 100644
index 000000000..8542bca8c
--- /dev/null
+++ b/lexlua/rexx.lua
@@ -0,0 +1,76 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Rexx LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('rexx')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ address arg by call class do drop else end exit expose forever forward guard
+ if interpret iterate leave method nop numeric otherwise parse procedure pull
+ push queue raise reply requires return routine result rc say select self sigl
+ signal super then to trace use when while until
+]], true)))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match([[
+ abbrev abs address arg beep bitand bitor bitxor b2x center changestr charin
+ charout chars compare consition copies countstr c2d c2x datatype date delstr
+ delword digits directory d2c d2x errortext filespec form format fuzz insert
+ lastpos left length linein lineout lines max min overlay pos queued random
+ reverse right sign sourceline space stream strip substr subword symbol time
+ trace translate trunc value var verify word wordindex wordlength wordpos words
+ xrange x2b x2c x2d
+ rxfuncadd rxfuncdrop rxfuncquery rxmessagebox rxwinexec
+ sysaddrexxmacro sysbootdrive sysclearrexxmacrospace syscloseeventsem
+ sysclosemutexsem syscls syscreateeventsem syscreatemutexsem syscurpos
+ syscurstate sysdriveinfo sysdrivemap sysdropfuncs sysdroprexxmacro
+ sysdumpvariables sysfiledelete sysfilesearch sysfilesystemtype sysfiletree
+ sysfromunicode systounicode sysgeterrortext sysgetfiledatetime sysgetkey
+ sysini sysloadfuncs sysloadrexxmacrospace sysmkdir sysopeneventsem
+ sysopenmutexsem sysposteventsem syspulseeventsem sysqueryprocess
+ sysqueryrexxmacro sysreleasemutexsem sysreorderrexxmacro sysrequestmutexsem
+ sysreseteventsem sysrmdir syssaverexxmacrospace syssearchpath
+ syssetfiledatetime syssetpriority syssleep sysstemcopy sysstemdelete
+ syssteminsert sysstemsort sysswitchsession syssystemdirectory systempfilename
+ systextscreenread systextscreensize sysutilversion sysversion sysvolumelabel
+ syswaiteventsem syswaitnamedpipe syswindecryptfile syswinencryptfile syswinver
+]], true)))
+
+-- Identifiers.
+local word = lexer.alpha * (lexer.alnum + S('@#$\\.!?_'))^0
+lex:add_rule('identifier', token(lexer.IDENTIFIER, word))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", true, true)
+local dq_str = lexer.delimited_range('"', true, true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '--' * lexer.nonnewline_esc^0 +
+ lexer.nested_pair('/*', '*/')))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Preprocessor.
+lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, lexer.starts_line('#') *
+ lexer.nonnewline^0))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/\\*%&|^~.,:;(){}')))
+
+-- Fold points
+lex:add_fold_point(lexer.KEYWORD, 'do', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'select', 'return')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '--', lexer.fold_line_comments('--'))
+--lex:add_fold_point(lexer.OPERATOR, ':', ?)
+
+return lex
diff --git a/lexlua/rhtml.lua b/lexlua/rhtml.lua
new file mode 100644
index 000000000..ff76f2479
--- /dev/null
+++ b/lexlua/rhtml.lua
@@ -0,0 +1,20 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- RHTML LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('rhtml', {inherit = lexer.load('html')})
+
+-- Embedded Ruby.
+local ruby = lexer.load('rails')
+local ruby_start_rule = token('rhtml_tag', '<%' * P('=')^-1)
+local ruby_end_rule = token('rhtml_tag', '%>')
+lex:embed(ruby, ruby_start_rule, ruby_end_rule)
+lex:add_style('rhtml_tag', lexer.STYLE_EMBEDDED)
+
+-- Fold points.
+lex:add_fold_point('rhtml_tag', '<%', '%>')
+
+return lex
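`lex:embed(child, start_rule, end_rule)` is the general mechanism at work here: wherever `start_rule` matches, the child lexer takes over until `end_rule` matches, and the marker tokens get their own style. A hedged sketch of the same shape for a hypothetical template language; the 'mytemplate' lexer and 'template_tag' token are illustrative only, not part of this patch:

local lexer = require('lexer')
local lpeg = require('lpeg')
local token, P = lexer.token, lpeg.P

-- Hypothetical host lexer that hands '<? ... ?>' regions to the Lua lexer.
local host = lexer.new('mytemplate', {inherit = lexer.load('html')})
local lua = lexer.load('lua')
host:embed(lua, token('template_tag', P('<?')), token('template_tag', P('?>')))
host:add_style('template_tag', lexer.STYLE_EMBEDDED)
host:add_fold_point('template_tag', '<?', '?>')

return host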
diff --git a/lexlua/rstats.lua b/lexlua/rstats.lua
new file mode 100644
index 000000000..873854d6b
--- /dev/null
+++ b/lexlua/rstats.lua
@@ -0,0 +1,42 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- R LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('rstats')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ break else for if in next repeat return switch try while
+ Inf NA NaN NULL FALSE TRUE
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ array character complex data.frame double factor function integer list logical
+ matrix numeric vector
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ P('i')^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('<->+*/^=.,:;|$()[]{}')))
+
+return lex
diff --git a/lexlua/ruby.lua b/lexlua/ruby.lua
new file mode 100644
index 000000000..f8e346fb5
--- /dev/null
+++ b/lexlua/ruby.lua
@@ -0,0 +1,132 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Ruby LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('ruby')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ BEGIN END alias and begin break case class def defined? do else elsif end
+ ensure false for if in module next nil not or redo rescue retry return self
+ super then true undef unless until when while yield __FILE__ __LINE__
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ at_exit autoload binding caller catch chop chop! chomp chomp! eval exec exit
+ exit! fail fork format gets global_variables gsub gsub! iterator? lambda load
+ local_variables loop open p print printf proc putc puts raise rand readline
+ readlines require select sleep split sprintf srand sub sub! syscall system
+ test trace_var trap untrace_var
+]]) * -S('.:|'))
+
+local word_char = lexer.alnum + S('_!?')
+
+-- Identifiers.
+local word = (lexer.alpha + '_') * word_char^0
+lex:add_rule('identifier', token(lexer.IDENTIFIER, word))
+
+-- Comments.
+local line_comment = '#' * lexer.nonnewline_esc^0
+local block_comment = lexer.starts_line('=begin') *
+ (lexer.any - lexer.newline * '=end')^0 *
+ (lexer.newline * '=end')^-1
+lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment))
+
+local delimiter_matches = {['('] = ')', ['['] = ']', ['{'] = '}'}
+local literal_delimitted = P(function(input, index)
+ local delimiter = input:sub(index, index)
+  if not delimiter:find('[%w\r\n\f\t ]') then -- non-alphanumeric, non-space only
+ local match_pos, patt
+ if delimiter_matches[delimiter] then
+ -- Handle nested delimiter/matches in strings.
+ local s, e = delimiter, delimiter_matches[delimiter]
+ patt = lexer.delimited_range(s..e, false, false, true)
+ else
+ patt = lexer.delimited_range(delimiter)
+ end
+ match_pos = lpeg.match(patt, input, index)
+ return match_pos or #input + 1
+ end
+end)
+
+-- Strings.
+local cmd_str = lexer.delimited_range('`')
+local lit_cmd = '%x' * literal_delimitted
+local lit_array = '%w' * literal_delimitted
+local sq_str = lexer.delimited_range("'")
+local dq_str = lexer.delimited_range('"')
+local lit_str = '%' * S('qQ')^-1 * literal_delimitted
+local heredoc = '<<' * P(function(input, index)
+ local s, e, indented, _, delimiter =
+ input:find('(%-?)(["`]?)([%a_][%w_]*)%2[\n\r\f;]+', index)
+ if s == index and delimiter then
+ local end_heredoc = (#indented > 0 and '[\n\r\f]+ *' or '[\n\r\f]+')
+ local _, e = input:find(end_heredoc..delimiter, e)
+ return e and e + 1 or #input + 1
+ end
+end)
+-- TODO: regex_str fails with `obj.method /patt/` syntax.
+local regex_str = #P('/') * lexer.last_char_includes('!%^&*([{-=+|:;,?<>~') *
+ lexer.delimited_range('/', true, false) * S('iomx')^0
+local lit_regex = '%r' * literal_delimitted * S('iomx')^0
+lex:add_rule('string', token(lexer.STRING, (sq_str + dq_str + lit_str +
+ heredoc + cmd_str + lit_cmd +
+ lit_array) * S('f')^-1) +
+ token(lexer.REGEX, regex_str + lit_regex))
+
+-- Numbers.
+local dec = lexer.digit^1 * ('_' * lexer.digit^1)^0 * S('ri')^-1
+local bin = '0b' * S('01')^1 * ('_' * S('01')^1)^0
+local integer = S('+-')^-1 * (bin + lexer.hex_num + lexer.oct_num + dec)
+-- TODO: meta, control, etc. for numeric_literal.
+local numeric_literal = '?' * (lexer.any - lexer.space) * -word_char
+lex:add_rule('number', token(lexer.NUMBER, lexer.float * S('ri')^-1 + integer +
+ numeric_literal))
+
+-- Variables.
+local global_var = '$' * (word + S('!@L+`\'=~/\\,.;<>_*"$?:') + lexer.digit +
+ '-' * S('0FadiIKlpvw'))
+local class_var = '@@' * word
+local inst_var = '@' * word
+lex:add_rule('variable', token(lexer.VARIABLE, global_var + class_var +
+ inst_var))
+
+-- Symbols.
+lex:add_rule('symbol', token('symbol', ':' * P(function(input, index)
+ if input:sub(index - 2, index - 2) ~= ':' then return index end
+end) * (word_char^1 + sq_str + dq_str)))
+lex:add_style('symbol', lexer.STYLE_CONSTANT)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~')))
+
+-- Fold points.
+local function disambiguate(text, pos, line, s)
+ return line:sub(1, s - 1):match('^%s*$') and
+ not text:sub(1, pos - 1):match('\\[ \t]*\r?\n$') and 1 or 0
+end
+lex:add_fold_point(lexer.KEYWORD, 'begin', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'class', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'def', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'do', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'for', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'module', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'case', 'end')
+lex:add_fold_point(lexer.KEYWORD, 'if', disambiguate)
+lex:add_fold_point(lexer.KEYWORD, 'while', disambiguate)
+lex:add_fold_point(lexer.KEYWORD, 'unless', disambiguate)
+lex:add_fold_point(lexer.KEYWORD, 'until', disambiguate)
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '=begin', '=end')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
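The `literal_delimitted` helper above leans on the fourth argument of `lexer.delimited_range` (balanced matching) so that bracketed %-literals such as `%q(a (nested) string)` may contain their own delimiter pair. A stand-alone sketch of just that piece, assuming LPeg and the patch's `lexer` module are loadable:

local lexer = require('lexer')
local lpeg = require('lpeg')

-- Balanced '()' range: nested parentheses inside the literal do not close it.
local balanced = lexer.delimited_range('()', false, false, true)

-- Matches through the outer closing paren, leaving ' trailing' unconsumed.
print(lpeg.match(balanced, '(a (nested) literal) trailing'))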
diff --git a/lexlua/rust.lua b/lexlua/rust.lua
new file mode 100644
index 000000000..16172620f
--- /dev/null
+++ b/lexlua/rust.lua
@@ -0,0 +1,68 @@
+-- Copyright 2015-2018 Alejandro Baez (https://keybase.io/baez). See License.txt.
+-- Rust LPeg lexer.
+
+local lexer = require("lexer")
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('rust')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abstract alignof as become box break const continue crate do else enum extern
+ false final fn for if impl in let loop macro match mod move mut offsetof
+ override priv proc pub pure ref return Self self sizeof static struct super
+ trait true type typeof unsafe unsized use virtual where while yield
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, lexer.word^1 * S("!")))
+
+-- Library types
+lex:add_rule('library', token(lexer.LABEL, lexer.upper *
+ (lexer.lower + lexer.dec_num)^1))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ () bool isize usize char str u8 u16 u32 u64 i8 i16 i32 i64 f32 f64
+]]))
+
+-- Strings.
+local sq_str = P('L')^-1 * lexer.delimited_range("'")
+local dq_str = P('L')^-1 * lexer.delimited_range('"')
+local raw_str = '#"' * (lexer.any - '#')^0 * P('#')^-1
+lex:add_rule('string', token(lexer.STRING, dq_str + raw_str))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER,
+ lexer.float +
+ P('0b')^-1 * (lexer.dec_num + "_")^1 +
+ lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR,
+ S('+-/*%<>!=`^~@&|?#~:;,.()[]{}')))
+
+-- Attributes.
+lex:add_rule('preprocessor', token(lexer.PREPROCESSOR,
+ "#[" * (lexer.nonnewline - ']')^0 *
+ P("]")^-1))
+
+-- Fold points.
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+
+return lex
diff --git a/lexlua/sass.lua b/lexlua/sass.lua
new file mode 100644
index 000000000..5a1bd944b
--- /dev/null
+++ b/lexlua/sass.lua
@@ -0,0 +1,24 @@
+-- Copyright 2006-2018 Robert Gieseke. See License.txt.
+-- Sass CSS preprocessor LPeg lexer.
+-- http://sass-lang.com
+
+local lexer = require('lexer')
+local token = lexer.token
+local P, S = lpeg.P, lpeg.S
+
+local lex = lexer.new('sass', {inherit = lexer.load('css')})
+
+-- Line comments.
+lex:add_rule('line_comment', token(lexer.COMMENT, '//' * lexer.nonnewline^0))
+
+-- Variables.
+lex:add_rule('variable', token(lexer.VARIABLE, '$' * (lexer.alnum + S('_-'))^1))
+
+-- Mixins.
+lex:add_rule('mixin', token('mixin', P('@') * lexer.word))
+lex:add_style('mixin', lexer.STYLE_FUNCTION)
+
+-- Fold points.
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/scala.lua b/lexlua/scala.lua
new file mode 100644
index 000000000..c7119c321
--- /dev/null
+++ b/lexlua/scala.lua
@@ -0,0 +1,61 @@
+-- Copyright 2006-2018 JMS. See License.txt.
+-- Scala LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('scala')
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+lex:add_rule('whitespace', ws)
+
+-- Classes.
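+-- The word following the 'class' keyword is highlighted as a class name.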
+lex:add_rule('class', token(lexer.KEYWORD, P('class')) * ws^1 *
+ token(lexer.CLASS, lexer.word))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ abstract case catch class def do else extends false final finally for forSome
+ if implicit import lazy match new null object override package private
+ protected return sealed super this throw trait try true type val var while
+ with yield
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ Array Boolean Buffer Byte Char Collection Double Float Int Iterator LinkedList
+ List Long Map None Option Set Short SortedMap SortedSet String TreeMap TreeSet
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('('))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local symbol = "'" * lexer.word
+local dq_str = lexer.delimited_range('"', true)
+local tq_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+lex:add_rule('string', token(lexer.STRING, tq_str + symbol + dq_str))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ S('LlFfDd')^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/scheme.lua b/lexlua/scheme.lua
new file mode 100644
index 000000000..2050b26c9
--- /dev/null
+++ b/lexlua/scheme.lua
@@ -0,0 +1,80 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Scheme LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('scheme')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ and begin case cond cond-expand define define-macro delay do else fluid-let if
+ lambda let let* letrec or quasiquote quote set!
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ abs acos angle append apply asin assoc assq assv atan car cdr caar cadr cdar
+ cddr caaar caadr cadar caddr cdaar cdadr cddar cdddr
+ call-with-current-continuation call-with-input-file call-with-output-file
+ call-with-values call/cc catch ceiling char->integer char-downcase char-upcase
+ close-input-port close-output-port cons cos current-input-port
+ current-output-port delete-file display dynamic-wind eval exit exact->inexact
+ exp expt file-or-directory-modify-seconds floor force for-each gcd gensym
+ get-output-string getenv imag-part integer->char lcm length list list->string
+ list->vector list-ref list-tail load log magnitude make-polar make-rectangular
+ make-string make-vector map max member memq memv min modulo newline nil not
+ number->string open-input-file open-input-string open-output-file
+ open-output-string peek-char quotient read read-char read-line real-part
+ remainder reverse reverse! round set-car! set-cdr! sin sqrt string
+ string->list string->number string->symbol string-append string-copy
+ string-fill! string-length string-ref string-set! substring symbol->string
+ system tan truncate values vector vector->list vector-fill! vector-length
+ vector-ref vector-set! with-input-from-file with-output-to-file write
+ write-char
+ boolean? char-alphabetic? char-ci<=? char-ci<? char-ci=? char-ci>=? char-ci>?
+ char-lower-case? char-numeric? char-ready? char-upper-case? char-whitespace?
+ char<=? char<? char=? char>=? char>? char? complex? eof-object? eq? equal?
+ eqv? even? exact? file-exists? inexact? input-port? integer? list? negative?
+ null? number? odd? output-port? pair? port? positive? procedure? rational?
+ real? string-ci<=? string-ci<? string-ci=? string-ci>=? string-ci>? string<=?
+ string<? string=? string>=? string>? string? symbol? vector? zero?
+ #t #f
+]]))
+
+local word = (lexer.alpha + S('-!?')) * (lexer.alnum + S('-!?'))^0
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, word))
+
+-- Strings.
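+-- Quoted symbols ('foo) and '#'-prefixed literals (e.g. #\newline, #xFF) are
+-- also lexed as strings.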
+local literal = (P("'") + '#' * S('\\bdox')) * lexer.word
+local dq_str = lexer.delimited_range('"')
+lex:add_rule('string', token(lexer.STRING, literal + dq_str))
+
+-- Comments.
+local line_comment = ';' * lexer.nonnewline^0
+local block_comment = '#|' * (lexer.any - '|#')^0 * P('|#')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, P('-')^-1 * lexer.digit^1 *
+ (S('./') * lexer.digit^1)^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('<>=*/+-`@%:()')))
+
+-- Entity.
+lex:add_rule('entity', token('entity', '&' * word))
+lex:add_style('entity', lexer.STYLE_VARIABLE)
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.COMMENT, '#|', '|#')
+lex:add_fold_point(lexer.COMMENT, ';', lexer.fold_line_comments(';'))
+
+return lex
diff --git a/lexlua/smalltalk.lua b/lexlua/smalltalk.lua
new file mode 100644
index 000000000..3cf0d2656
--- /dev/null
+++ b/lexlua/smalltalk.lua
@@ -0,0 +1,46 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Smalltalk LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('smalltalk')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ true false nil self super isNil not Smalltalk Transcript
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ Date Time Boolean True False Character String Array Symbol Integer Object
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ '$' * lexer.word))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT,
+ lexer.delimited_range('"', false, true)))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S(':=_<>+-/*!()[]')))
+
+-- Labels.
+lex:add_rule('label', token(lexer.LABEL, '#' * lexer.word))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '[', ']')
+
+return lex
diff --git a/lexlua/sml.lua b/lexlua/sml.lua
new file mode 100644
index 000000000..4b8faf625
--- /dev/null
+++ b/lexlua/sml.lua
@@ -0,0 +1,113 @@
+-- Copyright 2017-2018 Murray Calavera. See License.txt.
+-- Standard ML LPeg lexer.
+
+local lexer = require('lexer')
+local token = lexer.token
+
+local function mlword(words)
+ return lexer.word_match(words, "'")
+end
+
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+
+-- Single-line comments are valid in Successor ML.
+local cl = '(*)' * lexer.nonnewline^0
+local comment = token(lexer.COMMENT, cl + lexer.nested_pair('(*', '*)'))
+
+local string = token(lexer.STRING, lpeg.P('#')^-1 *
+ lexer.delimited_range('"', true))
+
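+-- A digit sequence that may contain '_' group separators.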
+local function num(digit)
+ return digit * (digit^0 * lpeg.P('_'))^0 * digit^1 + digit
+end
+
+local int = num(lexer.digit)
+local frac = lpeg.P('.') * int
+local minus = lpeg.P('~')^-1
+local exp = lpeg.S('eE') * minus * int
+local real = int * frac^-1 * exp + int * frac * exp^-1
+local hex = num(lexer.xdigit)
+local bin = num(lpeg.S('01'))
+
+local number = token(lexer.NUMBER,
+ lpeg.P('0w') * int
+ + (lpeg.P('0wx') + lpeg.P('0xw')) * hex
+ + (lpeg.P('0wb') + lpeg.P('0bw')) * bin
+ + minus * lpeg.P('0x') * hex
+ + minus * lpeg.P('0b') * bin
+ + minus * real
+ + minus * int
+)
+
+local keyword = token(lexer.KEYWORD, mlword{
+ 'abstype', 'and', 'andalso', 'as', 'case', 'do', 'datatype', 'else', 'end',
+ 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix', 'infixr', 'let',
+ 'local', 'nonfix', 'of', 'op', 'orelse', 'raise', 'rec', 'then',
+ 'type', 'val', 'with', 'withtype', 'while',
+
+ 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
+ 'struct', 'structure'
+})
+
+-- includes valid symbols for identifiers
+local operator = token(lexer.OPERATOR,
+ lpeg.S('!*/+-^:@=<>()[]{},;._|#%&$?~`\\'))
+
+local type = token(lexer.TYPE, mlword{
+ 'int', 'real', 'word', 'bool', 'char', 'string', 'unit',
+ 'array', 'exn', 'list', 'option', 'order', 'ref', 'substring', 'vector'
+})
+
+-- `real`, `vector` and `substring` are a problem: they are also type names.
+local func = token(lexer.FUNCTION, mlword{
+ 'app', 'before', 'ceil', 'chr', 'concat', 'exnMessage', 'exnName',
+ 'explode', 'floor', 'foldl', 'foldr', 'getOpt', 'hd', 'ignore',
+ 'implode', 'isSome', 'length', 'map', 'not', 'null', 'ord', 'print',
+ 'real', 'rev', 'round', 'size', 'str', 'substring', 'tl', 'trunc',
+ 'valOf', 'vector',
+ 'o', 'abs', 'mod', 'div'
+})
+
+-- non-symbolic identifiers only
+local id = (lexer.alnum + "'" + '_')^0
+local aid = lexer.alpha * id
+local longid = (aid * lpeg.P('.'))^0 * aid
+local identifier = token(lexer.IDENTIFIER, lexer.lower * id)
+local typevar = token(lexer.VARIABLE, "'" * id)
+local c = mlword{'true', 'false', 'nil'}
+local const = token(lexer.CONSTANT, lexer.upper * id + c)
+local structure = token(lexer.CLASS, aid * lpeg.P('.'))
+
+local open
+ = token(lexer.KEYWORD, mlword{'open', 'structure', 'functor'})
+ * ws * token(lexer.CLASS, longid)
+
+local struct_dec
+ = token(lexer.KEYWORD, lpeg.P('structure')) * ws
+ * token(lexer.CLASS, aid) * ws
+ * token(lexer.OPERATOR, lpeg.P('=')) * ws
+
+local struct_new = struct_dec * token(lexer.KEYWORD, lpeg.P('struct'))
+local struct_alias = struct_dec * token(lexer.CLASS, longid)
+
+local M = {_NAME = 'sml'}
+
+M._rules = {
+ {'whitespace', ws},
+ {'comment', comment},
+ {'number', number},
+ {'struct_new', struct_new},
+ {'struct_alias', struct_alias},
+ {'structure', structure},
+ {'open', open},
+ {'type', type},
+ {'keyword', keyword},
+ {'function', func},
+ {'string', string},
+ {'operator', operator},
+ {'typevar', typevar},
+ {'constant', const},
+ {'identifier', identifier},
+}
+
+return M
diff --git a/lexlua/snobol4.lua b/lexlua/snobol4.lua
new file mode 100644
index 000000000..9c8682bc2
--- /dev/null
+++ b/lexlua/snobol4.lua
@@ -0,0 +1,65 @@
+-- Copyright 2013-2018 Michael T. Richter. See License.txt.
+-- SNOBOL4 lexer.
+-- This lexer works with classic SNOBOL4 as well as the CSNOBOL4 extensions.
+
+local lexer = require 'lexer'
+local token, word_match = lexer.token, lexer.word_match
+local B, P, R, S, V = lpeg.B, lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local M = { _NAME = 'snobol4' }
+
+-- Helper patterns.
+local dotted_id = lexer.word * (P'.' * lexer.word)^0
+
+local dq_str = lexer.delimited_range('"', true, true)
+local sq_str = lexer.delimited_range("'", true, true)
+
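+-- Branch targets in goto fields: :(LABEL), :S(LABEL)/:F(LABEL) and
+-- )S(LABEL)/)F(LABEL).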
+local branch = B(lexer.space * P':(') * dotted_id * #P')'
+local sbranch = B(lexer.space * P':' * S'SF' * '(') * dotted_id * #P')'
+local sbranchx = B(P')' * S'SF' * P'(') * dotted_id * #P')'
+
+-- Token definitions.
+local bif = token(lexer.FUNCTION, word_match({
+ 'APPLY', 'ARRAY', 'CHAR', 'CONVERT', 'COPY', 'DATA', 'DATE', 'DIFFER', 'DUPL',
+ 'EQ', 'EVAL', 'FILE_ABSPATH', 'FILE_ISDIR', 'FREEZE', 'FUNCTION', 'GE', 'GT',
+ 'HOST', 'IDENT', 'INTEGER', 'IO_FINDUNIT', 'ITEM', 'LABEL', 'LOAD', 'LPAD',
+ 'LE', 'LGT', 'LT', 'NE', 'OPSYN', 'ORD', 'PROTOTYPE', 'REMDR', 'REPLACE',
+ 'REVERSE', 'RPAD', 'RSORT', 'SERV_LISTEN', 'SET', 'SETEXIT', 'SIZE', 'SORT',
+ 'SQRT', 'SSET', 'SUBSTR', 'TABLE', 'THAW', 'TIME', 'TRACE', 'TRIM', 'UNLOAD',
+ 'VALUE', 'VDIFFER',
+}, '', true) * #lexer.delimited_range('()', false, true, true))
+local comment = token(lexer.COMMENT,
+ lexer.starts_line(S'*#|;!' * lexer.nonnewline^0))
+local control = token(lexer.PREPROCESSOR, lexer.starts_line(P'-' * lexer.word))
+local identifier = token(lexer.DEFAULT, dotted_id)
+local keyword = token(lexer.KEYWORD, word_match({
+ 'ABORT', 'ARRAY', 'CONTINUE', 'DEFINE', 'END', 'FRETURN', 'INPUT', 'NRETURN',
+ 'OUTPUT', 'PUNCH', 'RETURN', 'SCONTINUE', 'TABLE',
+}, '', true) + P'&' * lexer.word)
+local label = token(lexer.LABEL, lexer.starts_line(dotted_id))
+local number = token(lexer.NUMBER, lexer.float + lexer.integer)
+local operator = token(lexer.OPERATOR, S'¬?$.!%*/#+-@⊥&^~\\=')
+local pattern = lexer.token(lexer.CLASS, word_match({ -- keep distinct
+ 'ABORT', 'ANY', 'ARB', 'ARBNO', 'BAL', 'BREAK', 'BREAKX', 'FAIL', 'FENCE',
+ 'LEN', 'NOTANY', 'POS', 'REM', 'RPOS', 'RTAB', 'SPAN', 'SUCCEED', 'TAB',
+}, '', true) * #lexer.delimited_range('()', false, true, true))
+local str = token(lexer.STRING, sq_str + dq_str)
+local target = token(lexer.LABEL, branch + sbranch + sbranchx)
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+
+M._rules = {
+ { 'comment', comment },
+ { 'control', control },
+ { 'string', str },
+ { 'number', number },
+ { 'keyword', keyword },
+ { 'label', label },
+ { 'target', target },
+ { 'pattern', pattern },
+ { 'built-in', bif },
+ { 'operator', operator },
+ { 'identifier', identifier },
+ { 'whitespace', ws },
+}
+
+return M
diff --git a/lexlua/sql.lua b/lexlua/sql.lua
new file mode 100644
index 000000000..23bf44173
--- /dev/null
+++ b/lexlua/sql.lua
@@ -0,0 +1,59 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- SQL LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('sql')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ add all alter analyze and as asc asensitive before between bigint binary blob
+ both by call cascade case change char character check collate column condition
+ connection constraint continue convert create cross current_date current_time
+ current_timestamp current_user cursor database databases day_hour
+ day_microsecond day_minute day_second dec decimal declare default delayed
+ delete desc describe deterministic distinct distinctrow div double drop dual
+ each else elseif enclosed escaped exists exit explain false fetch float for
+ force foreign from fulltext goto grant group having high_priority
+ hour_microsecond hour_minute hour_second if ignore in index infile inner inout
+ insensitive insert int integer interval into is iterate join key keys kill
+ leading leave left like limit lines load localtime localtimestamp lock long
+ longblob longtext loop low_priority match mediumblob mediumint mediumtext
+ middleint minute_microsecond minute_second mod modifies natural not
+ no_write_to_binlog null numeric on optimize option optionally or order out
+ outer outfile precision primary procedure purge read reads real references
+ regexp rename repeat replace require restrict return revoke right rlike schema
+ schemas second_microsecond select sensitive separator set show smallint soname
+ spatial specific sql sqlexception sqlstate sqlwarning sql_big_result
+ sql_calc_found_rows sql_small_result ssl starting straight_join table
+ terminated text then tinyblob tinyint tinytext to trailing trigger true undo
+ union unique unlock unsigned update usage use using utc_date utc_time
+ utc_timestamp values varbinary varchar varcharacter varying when where while
+ with write xor year_month zerofill
+]], true)))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"') +
+ lexer.delimited_range('`')))
+
+-- Comments.
+local line_comment = (P('--') + '#') * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S(',()')))
+
+return lex
diff --git a/lexlua/taskpaper.lua b/lexlua/taskpaper.lua
new file mode 100644
index 000000000..060e1cbc3
--- /dev/null
+++ b/lexlua/taskpaper.lua
@@ -0,0 +1,60 @@
+-- Copyright (c) 2016-2018 Larry Hynes. See License.txt.
+-- Taskpaper LPeg lexer
+
+local lexer = require('lexer')
+local token = lexer.token
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local M = {_NAME = 'taskpaper'}
+
+local delimiter = P(' ') + P('\t')
+
+-- Whitespace
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+
+-- Tags
+local day_tag = token('day_tag', (P('@today') + P('@tomorrow')))
+
+local overdue_tag = token('overdue_tag', P('@overdue'))
+
+local plain_tag = token('plain_tag', P('@') * lexer.word)
+
+local extended_tag = token('extended_tag',
+ P('@') * lexer.word * P('(') *
+ (lexer.word + R('09') + P('-'))^1 * P(')'))
+
+-- Projects
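+-- A project is a line that starts with an alphanumeric character and ends
+-- with ':'.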
+local project = token('project',
+ lexer.nested_pair(lexer.starts_line(lexer.alnum), ':') *
+ lexer.newline)
+
+-- Notes
+local note = token('note', delimiter^1 * lexer.alnum * lexer.nonnewline^0)
+
+-- Tasks
+local task = token('task', delimiter^1 * P('-') + lexer.newline)
+
+M._rules = {
+ {'note', note},
+ {'task', task},
+ {'project', project},
+ {'extended_tag', extended_tag},
+ {'day_tag', day_tag},
+ {'overdue_tag', overdue_tag},
+ {'plain_tag', plain_tag},
+ {'whitespace', ws},
+}
+
+M._tokenstyles = {
+ note = lexer.STYLE_CONSTANT,
+ task = lexer.STYLE_FUNCTION,
+ project = lexer.STYLE_TAG,
+ extended_tag = lexer.STYLE_COMMENT,
+ day_tag = lexer.STYLE_CLASS,
+ overdue_tag = lexer.STYLE_PREPROCESSOR,
+ plain_tag = lexer.STYLE_COMMENT,
+}
+
+M._LEXBYLINE = true
+
+return M
diff --git a/lexlua/tcl.lua b/lexlua/tcl.lua
new file mode 100644
index 000000000..5ac96f85f
--- /dev/null
+++ b/lexlua/tcl.lua
@@ -0,0 +1,49 @@
+-- Copyright 2014-2018 Joshua Krämer. See License.txt.
+-- Tcl LPeg lexer.
+-- This lexer follows the TCL dodekalogue (http://wiki.tcl.tk/10259).
+-- It is based on the previous lexer by Mitchell.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('tcl')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comment.
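+-- A '#' starts a comment only where a command can start, i.e. when preceded
+-- by nothing but spaces or tabs since the start of the input, a newline, or
+-- a ';'.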
+lex:add_rule('comment', token(lexer.COMMENT, '#' * P(function(input, index)
+ local i = index - 2
+ while i > 0 and input:find('^[ \t]', i) do i = i - 1 end
+ if i < 1 or input:find('^[\r\n;]', i) then return index end
+end) * lexer.nonnewline^0))
+
+-- Separator (semicolon).
+lex:add_rule('separator', token(lexer.CLASS, P(';')))
+
+-- Argument expander.
+lex:add_rule('expander', token(lexer.LABEL, P('{*}')))
+
+-- Delimiters.
+lex:add_rule('braces', token(lexer.KEYWORD, S('{}')))
+lex:add_rule('quotes', token(lexer.FUNCTION, '"'))
+lex:add_rule('brackets', token(lexer.VARIABLE, S('[]')))
+
+-- Variable substitution.
+lex:add_rule('variable', token(lexer.STRING, '$' *
+ (lexer.alnum + '_' + P(':')^2)^0))
+
+-- Backslash substitution.
+lex:add_rule('backslash', token(lexer.TYPE,
+ '\\' * (lexer.digit * lexer.digit^-2 +
+ 'x' * lexer.xdigit^1 +
+ 'u' * lexer.xdigit * lexer.xdigit^-3 +
+ 'U' * lexer.xdigit * lexer.xdigit^-7 +
+ 1)))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
diff --git a/lexlua/template.txt b/lexlua/template.txt
new file mode 100644
index 000000000..730479384
--- /dev/null
+++ b/lexlua/template.txt
@@ -0,0 +1,38 @@
+-- ? LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('?')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ keyword1 keyword2 keyword3
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/%^=<>,.{}[]()')))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'start', 'end')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '#', lexer.fold_line_comments('#'))
+
+return lex
diff --git a/lexlua/tex.lua b/lexlua/tex.lua
new file mode 100644
index 000000000..185c543ee
--- /dev/null
+++ b/lexlua/tex.lua
@@ -0,0 +1,34 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Plain TeX LPeg lexer.
+-- Modified by Robert Gieseke.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('tex')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '%' * lexer.nonnewline^0))
+
+-- TeX environments.
+lex:add_rule('environment', token('environment', '\\' * (P('begin') + 'end') *
+ lexer.word))
+lex:add_style('environment', lexer.STYLE_KEYWORD)
+
+-- Commands.
+lex:add_rule('command', token(lexer.KEYWORD, '\\' *
+ (lexer.alpha^1 + S('#$&~_^%{}'))))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('$&#{}[]')))
+
+-- Fold points.
+lex:add_fold_point(lexer.COMMENT, '%', lexer.fold_line_comments('%'))
+lex:add_fold_point('environment', '\\begin', '\\end')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+
+return lex
diff --git a/lexlua/texinfo.lua b/lexlua/texinfo.lua
new file mode 100644
index 000000000..9a742f251
--- /dev/null
+++ b/lexlua/texinfo.lua
@@ -0,0 +1,222 @@
+-- Copyright 2014-2018 stef@ailleurs.land. See License.txt.
+-- Plain Texinfo version 5.2 LPeg lexer.
+-- Freely inspired by Mitchell's work, with valuable help from him as well.
+
+-- Directives are processed (more or less) in Texinfo Reference Card order.
+-- The Reference Card page for each directive group is noted in a comment.
+
+--[[
+Note: Improving Fold Points use with Texinfo
+
+At the very beginning of your Texinfo file, it can be wise to insert these
+aliases:
+
+@alias startchapter = comment
+@alias endchapter = comment
+
+Then use this to begin each chapter:
+
+@endchapter --------------------------------------------------------------------
+@chapter CHAPTER TITLE
+@startchapter ------------------------------------------------------------------
+
+With the use of Scintilla's `SCI_FOLDALL(SC_FOLDACTION_TOGGLE)` or Textadept's
+`buffer:fold_all(buffer.FOLDACTION_TOGGLE)`, you then get nice chapter
+folding, which is useful with large documents.
+]]
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('texinfo')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Directives.
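+-- A directive is matched together with its leading '@' or '@end' prefix.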
+local directives_base = word_match([[
+ end
+ -- Custom keywords for chapter folding
+ startchapter endchapter
+ -- List and tables (page 2, column 2)
+ itemize enumerate
+ -- Beginning a Texinfo document (page 1, column 1)
+ titlepage copying
+ -- Block environments (page 2, column 1)
+ cartouche
+ -- Block environments > Displays using fixed-width fonts (page 2, column 2)
+ example smallexample
+ -- Lists and tables (page 2, column 2)
+ multitable
+ -- Floating Displays (page 2, column 3)
+ float listoffloats caption shortcaption image
+ -- Floating Displays > Footnotes (page 2, column 3)
+ footnote footnotestyle
+ -- Conditionally (in)visible text > Output formats (page 3, column 3)
+ ifdocbook ifhtml ifinfo ifplaintext iftex ifxml ifnotdocbook ifnothtml
+ ifnotplaintext ifnottex ifnotxml ifnotinfo inlinefmt inlinefmtifelse
+ -- Conditionally (in)visible text > Raw formatter text (page 4, column 1)
+ docbook html tex xml inlineraw
+ -- Conditionally (in)visible text > Document variables (page 4, column 1)
+ set clear value ifset ifclear inlineifset inlineifclear
+ -- Conditionally (in)visible text > Testing for commands (page 4, column 1)
+ ifcommanddefined ifcommandnotdefined end
+ -- Defining new Texinfo commands (page 4, column 1)
+ alias macro unmacro definfounclose
+ -- File inclusion (page 4, column 1)
+ include verbatiminclude
+ -- Formatting and headers/footers for TeX (page 4, column 1)
+ allowcodebreaks finalout fonttextsize
+ -- Formatting and headers/footers for TeX > paper size (page 4, column 2)
+ smallbook afourpaper afivepaper afourlatex afourwide pagesizes
+ -- Formatting and headers/footers for TeX > Page headers and footers (page 4,
+ -- column 2)
+ -- not implemented
+ -- Document preferences (page 4, column 2)
+ -- not implemented
+ -- Ending a Texinfo document (page 4, column 2)
+ bye
+]], true)
+lex:add_rule('directive', token('directives', ('@end' * lexer.space^1 + '@') *
+ directives_base))
+lex:add_style('directives', lexer.STYLE_FUNCTION)
+
+-- Chapters.
+local chapters_base = word_match([[
+ -- Chapter structuring (page 1, column 2)
+ lowersections raisesections part
+ -- Chapter structuring > Numbered, included in contents (page 1, column 2)
+ chapter centerchap
+ -- Chapter structuring > Context-dependent, included in contents (page 1,
+ -- column 2)
+ section subsection subsubsection
+ -- Chapter structuring > Unnumbered, included in contents (page 1, column 2)
+ unnumbered unnumberedsec unnumberedsubsec unnumberedsubsection
+ unnumberedsubsubsec unnumberedsubsubsection
+ -- Chapter structuring > Lettered and numbered, included in contents (page 1,
+ -- column 2)
+ appendix appendixsec appendixsection appendixsubsec appendixsubsection
+ appendixsubsubsec appendixsubsubsection
+ -- Chapter structuring > Unnumbered, not included in contents, no new page
+ -- (page 1, column 3)
+ chapheading majorheading heading subheading subsubheading
+]], true)
+lex:add_rule('chapter', token('chapters', ('@end' * lexer.space^1 + '@') *
+ chapters_base))
+lex:add_style('chapters', lexer.STYLE_CLASS)
+
+-- Common keywords.
+local keyword_base = word_match([[
+ end
+ -- Beginning a Texinfo document (page 1, column 1)
+ setfilename settitle insertcopying
+ -- Beginning a Texinfo document > Internationalization (page 1, column 1)
+ documentencoding documentlanguage frenchspacing
+ -- Beginning a Texinfo document > Info directory specification and HTML
+ -- document description (page 1, column 1)
+ dircategory direntry documentdescription
+ -- Beginning a Texinfo document > Title pages (page 1, column 1)
+ shorttitlepage center titlefont title subtitle author
+ -- Beginning a Texinfo document > Tables of contents (page 1, column 2)
+ shortcontents summarycontents contents setcontentsaftertitlepage
+ setshortcontentsaftertitlepage
+ -- Nodes (page 1, column 2)
+ node top anchor novalidate
+ -- Menus (page 1, column 2)
+ menu detailmenu
+ -- Cross references > Within the Info system (page 1, column 3)
+ xref pxref ref inforef xrefautomaticsectiontitle
+ -- Cross references > Outside of info (page 1, column 3)
+ url cite
+ -- Marking text > Markup for regular text (page 1, column 3)
+ var dfn acronym abbr
+ -- Marking text > Markup for literal text (page 1, column 3)
+ code file command env option kbd key email indicateurl samp verb
+ -- Marking text > GUI sequences (page 2, column 1)
+ clicksequence click clickstyle arrow
+ -- Marking text > Math (page 2, column 1)
+ math minus geq leq
+ -- Marking text > Explicit font selection (page 2, column 1)
+ sc r i slanted b sansserif t
+ -- Block environments (page 2, column 1)
+ noindent indent exdent
+ -- Block environments > Normally filled displays using regular text fonts
+ -- (page 2, column 1)
+ quotation smallquotation indentedblock smallindentedblock raggedright
+ -- Block environments > Line-for-line displays using regular text fonts (page
+ -- 2, column 2)
+ format smallformat display smalldisplay flushleft flushright
+ -- Block environments > Displays using fixed-width fonts (page 2, column 2)
+ lisp smalllisp verbatim
+ -- Lists and tables (page 2, column 2)
+ table ftable vtable tab item itemx headitem headitemfont asis
+ -- Indices (page 2, column 3)
+ cindex findex vindex kindex pindex tindex defcodeindex syncodeindex synindex
+ printindex
+ -- Insertions within a paragraph > Characters special to Texinfo (page 2,
+ -- column 3)
+ @ { } backslashchar comma hashchar : . ? ! dmn
+ -- Insertions within a paragraph > Accents (page 3, column 1)
+ -- not implemented
+ -- Insertions within a paragraph > Non-English characters (page 3, column 1)
+ -- not implemented
+ -- Insertions within a paragraph > Other text characters and logos (page 3,
+ -- column 1)
+ bullet dots enddots euro pounds textdegree copyright registeredsymbol TeX
+ LaTeX today guillemetleft guillemetright guillemotleft guillemotright
+ -- Insertions within a paragraph > Glyphs for code examples (page 3, column 2)
+ equiv error expansion point print result
+ -- Making and preventing breaks (page 3, column 2)
+ * / - hyphenation tie w refill
+ -- Vertical space (page 3, column 2)
+ sp page need group vskip
+ -- Definition commands (page 3, column 2)
+ -- not implemented
+]], true)
+lex:add_rule('keyword', token(lexer.KEYWORD, ('@end' * lexer.space^1 + '@') *
+ keyword_base))
+
+-- Italics
+lex:add_rule('emph', token('emph',
+ '@emph' *
+ lexer.delimited_range('{}', false, true, true)))
+lex:add_style('emph', lexer.STYLE_STRING..',italics')
+
+-- Bold
+lex:add_rule('strong', token('strong',
+ '@strong' *
+ lexer.delimited_range('{}', false, true, true)))
+lex:add_style('strong', lexer.STYLE_STRING..',bold')
+
+-- Identifiers
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING,
+ lexer.delimited_range('{}', false, true, true)))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Comments.
+local line_comment = '@c' * lexer.nonnewline_esc^0
+--local line_comment_long = '@comment' * lexer.nonnewline_esc^0
+local block_comment = '@ignore' * (lexer.any - '@end ignore')^0 *
+ P('@end ignore')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Fold points.
+lex:add_fold_point('directives', '@titlepage', '@end titlepage')
+lex:add_fold_point('directives', '@copying', '@end copying')
+lex:add_fold_point('directives', '@ifset', '@end ifset')
+lex:add_fold_point('directives', '@tex', '@end tex')
+lex:add_fold_point('directives', '@itemize', '@end itemize')
+lex:add_fold_point('directives', '@enumerate', '@end enumerate')
+lex:add_fold_point('directives', '@multitable', '@end multitable')
+lex:add_fold_point('directives', '@example', '@end example')
+lex:add_fold_point('directives', '@smallexample', '@end smallexample')
+lex:add_fold_point('directives', '@cartouche', '@end cartouche')
+lex:add_fold_point('directives', '@startchapter', '@end startchapter')
+
+return lex
diff --git a/lexlua/text.lua b/lexlua/text.lua
new file mode 100644
index 000000000..a3b367190
--- /dev/null
+++ b/lexlua/text.lua
@@ -0,0 +1,4 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Text LPeg lexer.
+
+return require('lexer').new('text')
diff --git a/lexlua/themes/curses.lua b/lexlua/themes/curses.lua
new file mode 100644
index 000000000..2162a3724
--- /dev/null
+++ b/lexlua/themes/curses.lua
@@ -0,0 +1,55 @@
+-- Copyright 2007-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Curses theme for Lua lexers.
+-- Contributions by Ana Balan.
+
+local property = require('lexer').property
+
+-- Normal colors.
+property['color.black'] = '#000000'
+property['color.red'] = '#800000'
+property['color.green'] = '#008000'
+property['color.yellow'] = '#808000'
+property['color.blue'] = '#000080'
+property['color.magenta'] = '#800080'
+property['color.cyan'] = '#008080'
+property['color.white'] = '#C0C0C0'
+
+-- Light colors.
+-- These only apply to 16 color terminals. For other terminals, set the
+-- style's `bold` attribute to use the light color variant.
+property['color.light_black'] = '#404040'
+property['color.light_red'] = '#FF0000'
+property['color.light_green'] = '#00FF00'
+--property['color.light_yellow'] = '#FFFF00'
+property['color.light_blue'] = '#0000FF'
+property['color.light_magenta'] = '#FF00FF'
+--property['color.light_cyan'] = '#0000FF'
+property['color.light_white'] = '#FFFFFF'
+
+-- Predefined styles.
+property['style.default'] = 'fore:$(color.white),back:$(color.black)'
+property['style.linenumber'] = ''
+property['style.bracelight'] = 'fore:$(color.black),back:$(color.white)'
+property['style.controlchar'] = ''
+property['style.indentguide'] = ''
+property['style.calltip'] = '$(style.default)'
+property['style.folddisplaytext'] = 'fore:$(color.black),bold'
+
+-- Token styles.
+property['style.class'] = 'fore:$(color.yellow)'
+property['style.comment'] = 'fore:$(color.black),bold'
+property['style.constant'] = 'fore:$(color.red)'
+property['style.embedded'] = '$(style.keyword),back:$(color.black)'
+property['style.error'] = 'fore:$(color.red),bold'
+property['style.function'] = 'fore:$(color.blue)'
+property['style.identifier'] = ''
+property['style.keyword'] = 'fore:$(color.white),bold'
+property['style.label'] = 'fore:$(color.red),bold'
+property['style.number'] = 'fore:$(color.cyan)'
+property['style.operator'] = 'fore:$(color.yellow)'
+property['style.preprocessor'] = 'fore:$(color.magenta)'
+property['style.regex'] = 'fore:$(color.green),bold'
+property['style.string'] = 'fore:$(color.green)'
+property['style.type'] = 'fore:$(color.magenta),bold'
+property['style.variable'] = 'fore:$(color.blue),bold'
+property['style.whitespace'] = ''
diff --git a/lexlua/themes/dark.lua b/lexlua/themes/dark.lua
new file mode 100644
index 000000000..8d68d20ae
--- /dev/null
+++ b/lexlua/themes/dark.lua
@@ -0,0 +1,89 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Dark theme for Lua lexers.
+-- Contributions by Ana Balan.
+
+local property = require('lexer').property
+
+-- Greyscale colors.
+--property['color.dark_black'] = '#000000'
+property['color.black'] = '#1A1A1A'
+property['color.light_black'] = '#333333'
+--property['color.grey_black'] = '#4D4D4D'
+property['color.dark_grey'] = '#666666'
+--property['color.grey'] = '#808080'
+property['color.light_grey'] = '#999999'
+--property['color.grey_white'] = '#B3B3B3'
+property['color.dark_white'] = '#CCCCCC'
+--property['color.white'] = '#E6E6E6'
+--property['color.light_white'] = '#FFFFFF'
+
+-- Dark colors.
+--property['color.dark_red'] = '#661A1A'
+--property['color.dark_yellow'] = '#66661A'
+--property['color.dark_green'] = '#1A661A'
+--property['color.dark_teal'] = '#1A6666'
+--property['color.dark_purple'] = '#661A66'
+--property['color.dark_orange'] = '#B3661A'
+--property['color.dark_pink'] = '#B36666'
+--property['color.dark_lavender'] = '#6666B3'
+--property['color.dark_blue'] = '#1A66B3'
+
+-- Normal colors.
+property['color.red'] = '#994D4D'
+property['color.yellow'] = '#99994D'
+property['color.green'] = '#4D994D'
+property['color.teal'] = '#4D9999'
+property['color.purple'] = '#994D99'
+property['color.orange'] = '#E6994D'
+--property['color.pink'] = '#E69999'
+property['color.lavender'] = '#9999E6'
+property['color.blue'] = '#4D99E6'
+
+-- Light colors.
+property['color.light_red'] = '#CC8080'
+property['color.light_yellow'] = '#CCCC80'
+property['color.light_green'] = '#80CC80'
+--property['color.light_teal'] = '#80CCCC'
+--property['color.light_purple'] = '#CC80CC'
+--property['color.light_orange'] = '#FFCC80'
+--property['color.light_pink'] = '#FFCCCC'
+--property['color.light_lavender'] = '#CCCCFF'
+property['color.light_blue'] = '#80CCFF'
+
+-- Default style.
+property['font'], property['fontsize'] = 'Bitstream Vera Sans Mono', 10
+if WIN32 then
+ property['font'] = 'Courier New'
+elseif OSX then
+ property['font'], property['fontsize'] = 'Monaco', 12
+end
+
+-- Predefined styles.
+property['style.default'] = 'font:$(font),size:$(fontsize),'..
+ 'fore:$(color.light_grey),back:$(color.black)'
+property['style.linenumber'] = 'fore:$(color.dark_grey),back:$(color.black)'
+property['style.bracelight'] = 'fore:$(color.light_blue)'
+property['style.bracebad'] = 'fore:$(color.light_red)'
+property['style.controlchar'] = ''
+property['style.indentguide'] = 'fore:$(color.light_black)'
+property['style.calltip'] = 'fore:$(color.light_grey),back:$(color.light_black)'
+property['style.folddisplaytext'] = 'fore:$(color.dark_grey)'
+
+-- Token styles.
+property['style.class'] = 'fore:$(color.light_yellow)'
+property['style.comment'] = 'fore:$(color.dark_grey)'
+property['style.constant'] = 'fore:$(color.red)'
+property['style.embedded'] = '$(style.keyword),back:$(color.light_black)'
+property['style.error'] = 'fore:$(color.red),italics'
+property['style.function'] = 'fore:$(color.blue)'
+property['style.identifier'] = ''
+property['style.keyword'] = 'fore:$(color.dark_white)'
+property['style.label'] = 'fore:$(color.orange)'
+property['style.number'] = 'fore:$(color.teal)'
+property['style.operator'] = 'fore:$(color.yellow)'
+property['style.preprocessor'] = 'fore:$(color.purple)'
+property['style.regex'] = 'fore:$(color.light_green)'
+property['style.string'] = 'fore:$(color.green)'
+property['style.type'] = 'fore:$(color.lavender)'
+property['style.variable'] = 'fore:$(color.light_blue)'
+property['style.whitespace'] = ''
diff --git a/lexlua/themes/light.lua b/lexlua/themes/light.lua
new file mode 100644
index 000000000..644953198
--- /dev/null
+++ b/lexlua/themes/light.lua
@@ -0,0 +1,89 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Light theme for Lua lexers.
+-- Contributions by Ana Balan.
+
+local property = require('lexer').property
+
+-- Greyscale colors.
+--property['color.dark_black'] = '#000000'
+--property['color.black'] = '#1A1A1A'
+property['color.light_black'] = '#333333'
+--property['color.grey_black'] = '#4D4D4D'
+--property['color.dark_grey'] = '#666666'
+property['color.grey'] = '#808080'
+--property['color.light_grey'] = '#999999'
+--property['color.grey_white'] = '#B3B3B3'
+property['color.dark_white'] = '#CCCCCC'
+property['color.white'] = '#E6E6E6'
+--property['color.light_white'] = '#FFFFFF'
+
+-- Dark colors.
+--property['color.dark_red'] = '#661A1A'
+property['color.dark_yellow'] = '#66661A'
+property['color.dark_green'] = '#1A661A'
+--property['color.dark_teal'] = '#1A6666'
+--property['color.dark_purple'] = '#661A66'
+property['color.dark_orange'] = '#B3661A'
+--property['color.dark_pink'] = '#B36666'
+property['color.dark_lavender'] = '#6666B3'
+property['color.dark_blue'] = '#1A66B3'
+
+-- Normal colors.
+property['color.red'] = '#994D4D'
+property['color.yellow'] = '#99994D'
+property['color.green'] = '#4D994D'
+property['color.teal'] = '#4D9999'
+property['color.purple'] = '#994D99'
+--property['color.orange'] = '#E6994D'
+--property['color.pink'] = '#E69999'
+property['color.lavender'] = '#9999E6'
+--property['color.blue'] = '#4D99E6'
+
+-- Light colors.
+property['color.light_red'] = '#C08080'
+--property['color.light_yellow'] = '#CCCC80'
+--property['color.light_green'] = '#80CC80'
+--property['color.light_teal'] = '#80CCCC'
+--property['color.light_purple'] = '#CC80CC'
+--property['color.light_orange'] = '#FFCC80'
+--property['color.light_pink'] = '#FFCCCC'
+--property['color.light_lavender'] = '#CCCCFF'
+property['color.light_blue'] = '#80CCFF'
+
+-- Default style.
+property['font'], property['fontsize'] = 'Bitstream Vera Sans Mono', 10
+if WIN32 then
+ property['font'] = 'Courier New'
+elseif OSX then
+ property['font'], property['fontsize'] = 'Monaco', 12
+end
+
+-- Predefined styles.
+property['style.default'] = 'font:$(font),size:$(fontsize),'..
+ 'fore:$(color.light_black),back:$(color.white)'
+property['style.linenumber'] = 'fore:$(color.grey),back:$(color.white)'
+property['style.bracelight'] = 'fore:$(color.light_blue)'
+property['style.bracebad'] = 'fore:$(color.light_red)'
+property['style.controlchar'] = ''
+property['style.indentguide'] = 'fore:$(color.dark_white)'
+property['style.calltip'] = 'fore:$(color.light_black),back:$(color.dark_white)'
+property['style.folddisplaytext'] = 'fore:$(color.grey)'
+
+-- Token styles.
+property['style.class'] = 'fore:$(color.yellow)'
+property['style.comment'] = 'fore:$(color.grey)'
+property['style.constant'] = 'fore:$(color.red)'
+property['style.embedded'] = '$(style.keyword),back:$(color.dark_white)'
+property['style.error'] = 'fore:$(color.red),italics'
+property['style.function'] = 'fore:$(color.dark_orange)'
+property['style.identifier'] = ''
+property['style.keyword'] = 'fore:$(color.dark_blue)'
+property['style.label'] = 'fore:$(color.dark_orange)'
+property['style.number'] = 'fore:$(color.teal)'
+property['style.operator'] = 'fore:$(color.purple)'
+property['style.preprocessor'] = 'fore:$(color.dark_yellow)'
+property['style.regex'] = 'fore:$(color.dark_green)'
+property['style.string'] = 'fore:$(color.green)'
+property['style.type'] = 'fore:$(color.lavender)'
+property['style.variable'] = 'fore:$(color.dark_lavender)'
+property['style.whitespace'] = ''
diff --git a/lexlua/themes/scite.lua b/lexlua/themes/scite.lua
new file mode 100644
index 000000000..741b07e92
--- /dev/null
+++ b/lexlua/themes/scite.lua
@@ -0,0 +1,53 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- SciTE theme for Lua lexers.
+
+local property = require('lexer').property
+
+property['color.red'] = '#7F0000'
+property['color.yellow'] = '#7F7F00'
+property['color.green'] = '#007F00'
+property['color.teal'] = '#007F7F'
+property['color.purple'] = '#7F007F'
+property['color.orange'] = '#B07F00'
+property['color.blue'] = '#00007F'
+property['color.black'] = '#000000'
+property['color.grey'] = '#808080'
+property['color.white'] = '#FFFFFF'
+
+-- Default style.
+property['font'], property['fontsize'] = 'Monospace', 11
+if WIN32 then
+ property['font'] = 'Courier New'
+elseif OSX then
+ property['font'], property['fontsize'] = 'Monaco', 12
+end
+
+-- Predefined styles.
+property['style.default'] = 'font:$(font),size:$(fontsize),'..
+ 'fore:$(color.black),back:$(color.white)'
+property['style.linenumber'] = 'back:#C0C0C0'
+property['style.bracelight'] = 'fore:#0000FF,bold'
+property['style.bracebad'] = 'fore:#FF0000,bold'
+property['style.controlchar'] = ''
+property['style.indentguide'] = 'fore:#C0C0C0,back:$(color.white)'
+property['style.calltip'] = 'fore:$(color.white),back:#444444'
+property['style.folddisplaytext'] = ''
+
+-- Token styles.
+property['style.class'] = 'fore:$(color.black),bold'
+property['style.comment'] = 'fore:$(color.green)'
+property['style.constant'] = 'fore:$(color.teal),bold'
+property['style.embedded'] = 'fore:$(color.blue)'
+property['style.error'] = 'fore:$(color.red)'
+property['style.function'] = 'fore:$(color.black),bold'
+property['style.identifier'] = ''
+property['style.keyword'] = 'fore:$(color.blue),bold'
+property['style.label'] = 'fore:$(color.teal),bold'
+property['style.number'] = 'fore:$(color.teal)'
+property['style.operator'] = 'fore:$(color.black),bold'
+property['style.preprocessor'] = 'fore:$(color.yellow)'
+property['style.regex'] = '$(style.string)'
+property['style.string'] = 'fore:$(color.purple)'
+property['style.type'] = 'fore:$(color.blue)'
+property['style.variable'] = 'fore:$(color.black)'
+property['style.whitespace'] = ''
diff --git a/lexlua/toml.lua b/lexlua/toml.lua
new file mode 100644
index 000000000..ba8ec9be1
--- /dev/null
+++ b/lexlua/toml.lua
@@ -0,0 +1,53 @@
+-- Copyright 2015-2018 Alejandro Baez (https://keybase.io/baez). See License.txt.
+-- TOML LPeg lexer.
+
+local lexer = require("lexer")
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('toml', {fold_by_indentation = true})
+
+-- Whitespace.
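+-- Leading runs of spaces are plain whitespace; leading tabs are flagged with
+-- the 'indent_error' style.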
+lex:add_rule('indent', #lexer.starts_line(S(' \t')) *
+ (token(lexer.WHITESPACE, ' ') +
+ token('indent_error', '\t'))^1)
+lex:add_rule('whitespace', token(lexer.WHITESPACE, S(' \t')^1 +
+ lexer.newline^1))
+lex:add_style('indent_error', 'back:%(color.red)')
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[true false]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"')))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '#' * lexer.nonnewline^0))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('#=+-,.{}[]()')))
+
+-- Datetime.
+lex:add_rule('datetime',
+ token('timestamp',
+ lexer.digit * lexer.digit * lexer.digit * lexer.digit * -- yr
+ '-' * lexer.digit * lexer.digit^-1 * -- month
+ '-' * lexer.digit * lexer.digit^-1 * -- day
+ ((S(' \t')^1 + S('tT'))^-1 * -- separator
+ lexer.digit * lexer.digit^-1 * -- hour
+ ':' * lexer.digit * lexer.digit * -- minute
+ ':' * lexer.digit * lexer.digit * -- second
+ ('.' * lexer.digit^0)^-1 * -- fraction
+ ('Z' + -- timezone
+ S(' \t')^0 * S('-+') * lexer.digit * lexer.digit^-1 *
+ (':' * lexer.digit * lexer.digit)^-1)^-1)^-1))
+lex:add_style('timestamp', lexer.STYLE_NUMBER)
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+return lex
diff --git a/lexlua/vala.lua b/lexlua/vala.lua
new file mode 100644
index 000000000..ebc930392
--- /dev/null
+++ b/lexlua/vala.lua
@@ -0,0 +1,60 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Vala LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('vala')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ class delegate enum errordomain interface namespace signal struct using
+ -- Modifiers.
+ abstract const dynamic extern inline out override private protected public ref
+ static virtual volatile weak
+ -- Other.
+ as base break case catch construct continue default delete do else ensures
+ finally for foreach get if in is lock new requires return set sizeof switch
+ this throw throws try typeof value var void while
+ -- Etc.
+ null true false
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ bool char double float int int8 int16 int32 int64 long short size_t ssize_t
+ string uchar uint uint8 uint16 uint32 uint64 ulong unichar ushort
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", true)
+local dq_str = lexer.delimited_range('"', true)
+local tq_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+local ml_str = '@' * lexer.delimited_range('"', false, true)
+lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str + ml_str))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ S('uUlLfFdDmM')^-1))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/vb.lua b/lexlua/vb.lua
new file mode 100644
index 000000000..170b49321
--- /dev/null
+++ b/lexlua/vb.lua
@@ -0,0 +1,53 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- VisualBasic LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('vb')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ -- Control.
+ If Then Else ElseIf While Wend For To Each In Step Case Select Return Continue
+ Do Until Loop Next With Exit
+ -- Operators.
+ Mod And Not Or Xor Is
+ -- Storage types.
+ Call Class Const Dim ReDim Preserve Function Sub Property End Set Let Get New
+ Randomize Option Explicit On Error Execute
+ -- Storage modifiers.
+ Private Public Default
+ -- Constants.
+ Empty False Nothing Null True
+]], true)))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match([[
+ Boolean Byte Char Date Decimal Double Long Object Short Single String
+]], true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT,
+ (P("'") + word_match([[rem]], true)) *
+ lexer.nonnewline^0))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING,
+ lexer.delimited_range('"', true, true)))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ S('LlUuFf')^-2))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=><+-*^&:.,_()')))
+
+return lex
diff --git a/lexlua/vbscript.lua b/lexlua/vbscript.lua
new file mode 100644
index 000000000..acc59df60
--- /dev/null
+++ b/lexlua/vbscript.lua
@@ -0,0 +1,53 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- VBScript LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('vbscript')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match([[
+ -- Control.
+ If Then Else ElseIf While Wend For To Each In Step Case Select Return Continue
+ Do Until Loop Next With Exit
+ -- Operators.
+ Mod And Not Or Xor Is
+ -- Storage types.
+ Call Class Const Dim ReDim Preserve Function Sub Property End Set Let Get New
+ Randomize Option Explicit On Error Execute
+ -- Storage modifiers.
+ Private Public Default
+ -- Constants.
+ Empty False Nothing Null True
+]], true)))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match([[
+ Boolean Byte Char Date Decimal Double Long Object Short Single String
+]], true)))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT,
+ (P("'") + word_match([[rem]], true)) *
+ lexer.nonnewline^0))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING,
+ lexer.delimited_range('"', true, true)))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
+ S('LlUuFf')^-2))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=><+-*^&:.,_()')))
+
+return lex
diff --git a/lexlua/vcard.lua b/lexlua/vcard.lua
new file mode 100644
index 000000000..453d27a27
--- /dev/null
+++ b/lexlua/vcard.lua
@@ -0,0 +1,101 @@
+-- Copyright (c) 2015-2018 Piotr Orzechowski [drzewo.org]. See License.txt.
+-- vCard 2.1, 3.0 and 4.0 LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local M = {_NAME = 'vcard'}
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+
+-- Required properties.
+local required_property = token(lexer.KEYWORD, word_match({
+ 'BEGIN', 'END', 'FN', 'N' --[[ Not required in v4.0. ]], 'VERSION'
+}, nil, true)) * #P(':')
+
+-- Supported properties.
+local supported_property = token(lexer.TYPE, word_match({
+ 'ADR', 'AGENT' --[[ Not supported in v4.0. ]],
+ 'ANNIVERSARY' --[[ Supported in v4.0 only. ]], 'BDAY',
+ 'CALADRURI' --[[ Supported in v4.0 only. ]],
+ 'CALURI' --[[ Supported in v4.0 only. ]], 'CATEGORIES',
+ 'CLASS' --[[ Supported in v3.0 only. ]],
+ 'CLIENTPIDMAP' --[[ Supported in v4.0 only. ]], 'EMAIL', 'END',
+ 'FBURL' --[[ Supported in v4.0 only. ]],
+ 'GENDER' --[[ Supported in v4.0 only. ]], 'GEO',
+ 'IMPP' --[[ Not supported in v2.1. ]], 'KEY',
+ 'KIND' --[[ Supported in v4.0 only. ]],
+ 'LABEL' --[[ Not supported in v4.0. ]],
+ 'LANG' --[[ Supported in v4.0 only. ]], 'LOGO',
+ 'MAILER' --[[ Not supported in v4.0. ]],
+ 'MEMBER' --[[ Supported in v4.0 only. ]],
+ 'NAME' --[[ Supported in v3.0 only. ]],
+ 'NICKNAME' --[[ Not supported in v2.1. ]], 'NOTE', 'ORG', 'PHOTO',
+ 'PRODID' --[[ Not supported in v2.1. ]],
+ 'PROFILE' --[[ Not supported in v4.0. ]],
+ 'RELATED' --[[ Supported in v4.0 only. ]], 'REV', 'ROLE',
+ 'SORT-STRING' --[[ Not supported in v4.0. ]], 'SOUND', 'SOURCE', 'TEL',
+ 'TITLE', 'TZ', 'UID', 'URL', 'XML' --[[ Supported in v4.0 only. ]]
+}, nil, true)) * #S(':;')
+
+local identifier = lexer.alpha^1 * lexer.digit^0 * (P('-') * lexer.alnum^1)^0
+
+-- Extension.
+local extension = token(lexer.TYPE, lexer.starts_line(S('xX') * P('-') *
+ identifier * #S(':;')))
+
+-- Parameter.
+local parameter = token(lexer.IDENTIFIER,
+ lexer.starts_line(identifier * #S(':='))) +
+ token(lexer.STRING, identifier) * #S(':=')
+
+-- Operators.
+local operator = token(lexer.OPERATOR, S('.:;='))
+
+-- Group and property.
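+-- e.g. 'home.TEL:...', where 'home' is the property group name.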
+local group_sequence = token(lexer.CONSTANT, lexer.starts_line(identifier)) *
+ token(lexer.OPERATOR, P('.')) *
+ (required_property + supported_property +
+ lexer.token(lexer.TYPE, S('xX') * P('-') * identifier) *
+ #S(':;'))
+-- Begin vCard, end vCard.
+local begin_sequence = token(lexer.KEYWORD, P('BEGIN')) *
+ token(lexer.OPERATOR, P(':')) *
+ token(lexer.COMMENT, P('VCARD'))
+local end_sequence = token(lexer.KEYWORD, P('END')) *
+ token(lexer.OPERATOR, P(':')) *
+ token(lexer.COMMENT, P('VCARD'))
+
+-- vCard version (in v3.0 and v4.0 must appear immediately after BEGIN:VCARD).
+local version_sequence = token(lexer.KEYWORD, P('VERSION')) *
+ token(lexer.OPERATOR, P(':')) *
+ token(lexer.CONSTANT, lexer.digit^1 *
+ (P('.') * lexer.digit^1)^-1)
+
+-- Data.
+local data = token(lexer.IDENTIFIER, lexer.any)
+
+-- Rules.
+M._rules = {
+ {'whitespace', ws},
+ {'begin_sequence', begin_sequence},
+ {'end_sequence', end_sequence},
+ {'version_sequence', version_sequence},
+ {'group_sequence', group_sequence},
+ {'required_property', required_property},
+ {'supported_property', supported_property},
+ {'extension', extension},
+ {'parameter', parameter},
+ {'operator', operator},
+ {'data', data},
+}
+
+-- Folding.
+M._foldsymbols = {
+ _patterns = {'BEGIN', 'END'},
+ [lexer.KEYWORD] = {['BEGIN'] = 1, ['END'] = -1}
+}
+
+return M
diff --git a/lexlua/verilog.lua b/lexlua/verilog.lua
new file mode 100644
index 000000000..63af4c61c
--- /dev/null
+++ b/lexlua/verilog.lua
@@ -0,0 +1,86 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- Verilog LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('verilog')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ always assign begin case casex casez default deassign disable else end endcase
+ endfunction endgenerate endmodule endprimitive endspecify endtable endtask for
+ force forever fork function generate if initial join macromodule module
+ negedge posedge primitive repeat release specify table task wait while
+ -- Compiler directives.
+ `include `define `undef `ifdef `ifndef `else `endif `timescale `resetall
+ `signed `unsigned `celldefine `endcelldefine `default_nettype
+ `unconnected_drive `nounconnected_drive `protect `endprotect `protected
+ `endprotected `remove_gatename `noremove_gatename `remove_netname
+ `noremove_netname `expand_vectornets `noexpand_vectornets
+ `autoexpand_vectornets
+ -- Signal strengths.
+ strong0 strong1 pull0 pull1 weak0 weak1 highz0 highz1 small medium large
+]]))
+
+-- Function.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ $stop $finish $time $stime $realtime $settrace $cleartrace $showscopes
+ $showvars $monitoron $monitoroff $random $printtimescale $timeformat $display
+ -- Built-in primitives.
+ and nand or nor xor xnor buf bufif0 bufif1 not notif0 notif1 nmos pmos cmos
+ rnmos rpmos rcmos tran tranif0 tranif1 rtran rtranif0 rtranif1 pullup pulldown
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ integer reg time realtime defparam parameter event wire wand wor tri triand
+ trior tri0 tri1 trireg vectored scalared input output inout supply0 supply1
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range('"')))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+local bin_suffix = S('bB') * S('01_xXzZ')^1
+local oct_suffix = S('oO') * S('01234567_xXzZ')^1
+local dec_suffix = S('dD') * S('0123456789_xXzZ')^1
+local hex_suffix = S('hH') * S('0123456789abcdefABCDEF_xXzZ')^1
+lex:add_rule('number', token(lexer.NUMBER, (lexer.digit + '_')^1 +
+ "'" * (bin_suffix + oct_suffix +
+ dec_suffix + hex_suffix)))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=~+-/*<>%&|^~,:;()[]{}')))
+
+-- Fold points.
+lex:add_fold_point(lexer.KEYWORD, 'case', 'endcase')
+lex:add_fold_point(lexer.KEYWORD, 'casex', 'endcase')
+lex:add_fold_point(lexer.KEYWORD, 'casez', 'endcase')
+lex:add_fold_point(lexer.KEYWORD, 'function', 'endfunction')
+lex:add_fold_point(lexer.KEYWORD, 'fork', 'join')
+lex:add_fold_point(lexer.KEYWORD, 'table', 'endtable')
+lex:add_fold_point(lexer.KEYWORD, 'task', 'endtask')
+lex:add_fold_point(lexer.KEYWORD, 'generate', 'endgenerate')
+lex:add_fold_point(lexer.KEYWORD, 'specify', 'endspecify')
+lex:add_fold_point(lexer.KEYWORD, 'primitive', 'endprimitive')
+lex:add_fold_point(lexer.KEYWORD, 'module', 'endmodule')
+lex:add_fold_point(lexer.KEYWORD, 'begin', 'end')
+lex:add_fold_point(lexer.OPERATOR, '(', ')')
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+
+return lex
diff --git a/lexlua/vhdl.lua b/lexlua/vhdl.lua
new file mode 100644
index 000000000..ea5ff3768
--- /dev/null
+++ b/lexlua/vhdl.lua
@@ -0,0 +1,69 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- VHDL LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('vhdl')
+
+-- Whitespace.
+lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ access after alias all architecture array assert attribute begin block body
+ buffer bus case component configuration constant disconnect downto else elsif
+ end entity exit file for function generate generic group guarded if impure in
+ inertial inout is label library linkage literal loop map new next null of on
+ open others out package port postponed procedure process pure range record
+ register reject report return select severity signal shared subtype then to
+ transport type unaffected units until use variable wait when while with
+ note warning error failure
+ and nand or nor xor xnor rol ror sla sll sra srl mod rem
+ abs not false true
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, word_match[[
+ rising_edge shift_left shift_right rotate_left rotate_right resize std_match
+ to_integer to_unsigned to_signed unsigned signed to_bit to_bitvector
+ to_stdulogic to_stdlogicvector to_stdulogicvector
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ bit bit_vector character boolean integer real time string severity_level
+ positive natural signed unsigned line text std_logic std_logic_vector
+ std_ulogic std_ulogic_vector qsim_state qsim_state_vector qsim_12state
+ qsim_12state_vector qsim_strength mux_bit mux_vectory reg_bit reg_vector
+ wor_bit wor_vector
+]]))
+
+-- Constants.
+lex:add_rule('constant', token(lexer.CONSTANT, word_match[[
+ EVENT BASE LEFT RIGHT LOW HIGH ASCENDING IMAGE VALUE POS VAL SUCC VAL POS PRED
+ VAL POS LEFTOF RIGHTOF LEFT RIGHT LOW HIGH RANGE REVERSE LENGTH ASCENDING
+ DELAYED STABLE QUIET TRANSACTION EVENT ACTIVE LAST LAST LAST DRIVING DRIVING
+ SIMPLE INSTANCE PATH
+]]))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + "'") *
+ (lexer.alnum + S("_'"))^1))
+
+-- Strings.
+local sq_str = lexer.delimited_range("'", true, true)
+local dq_str = lexer.delimited_range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '--' * lexer.nonnewline^0))
+
+-- Numbers.
+lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('=/!:;<>+-/*%&|^~()')))
+
+return lex
diff --git a/lexlua/wsf.lua b/lexlua/wsf.lua
new file mode 100644
index 000000000..123d3b543
--- /dev/null
+++ b/lexlua/wsf.lua
@@ -0,0 +1,101 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- WSF LPeg lexer (based on XML).
+-- Contributed by Jeff Stone.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local lex = lexer.new('wsf')
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+lex:add_rule('whitespace', ws)
+
+-- Comments.
+lex:add_rule('comment', token(lexer.COMMENT, '<!--' * (lexer.any - '-->')^0 *
+ P('-->')^-1))
+
+local alpha = R('az', 'AZ', '\127\255')
+local word_char = lexer.alnum + S('_-:.??')
+local identifier = (alpha + S('_-:.??')) * word_char^0
+
+-- Elements.
+local element = token('element', '<' * P('/')^-1 * identifier)
+lex:add_rule('element', element)
+lex:add_style('element', lexer.STYLE_KEYWORD)
+
+-- Closing tags.
+local tag_close = token('element', P('/')^-1 * '>')
+lex:add_rule('tag_close', tag_close)
+
+-- Attributes.
+local attribute = token('attribute', identifier) * #(lexer.space^0 * '=')
+lex:add_rule('attribute', attribute)
+lex:add_style('attribute', lexer.STYLE_TYPE)
+
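+-- Matches the empty string only when the current lexing position is inside a
+-- tag, i.e. after an unmatched '<' and before the tag's closing '>'.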
+local in_tag = P(function(input, index)
+ local before = input:sub(1, index - 1)
+ local s, e = before:find('<[^>]-$'), before:find('>[^<]-$')
+ if s and e then return s > e and index or nil end
+ if s then return index end
+ return input:find('^[^<]->', index) and index or nil
+end)
+
+-- Equals.
+local equals = token(lexer.OPERATOR, '=') * in_tag
+lex:add_rule('equals', equals)
+
+-- Strings.
+local string = #S('\'"') * lexer.last_char_includes('=') *
+ token(lexer.STRING, lexer.delimited_range("'", false, true) +
+ lexer.delimited_range('"', false, true))
+lex:add_rule('string', string)
+
+-- Numbers.
+lex:add_rule('number', #lexer.digit * lexer.last_char_includes('=') *
+ token(lexer.NUMBER, lexer.digit^1 * P('%')^-1) * in_tag)
+
+-- Entities.
+lex:add_rule('entity', token('entity', '&' * word_match[[
+ lt gt amp apos quot
+]] * ';'))
+lex:add_style('entity', lexer.STYLE_OPERATOR)
+
+-- Fold points.
+local function disambiguate_lt(text, pos, line, s)
+ return not line:find('^</', s) and 1 or -1
+end
+lex:add_fold_point('element', '<', disambiguate_lt)
+lex:add_fold_point('element', '/>', -1)
+lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
+
+-- Finally, add JavaScript and VBScript as embedded languages
+
+-- Tags that start embedded languages.
+local embed_start_tag = element *
+ (ws^1 * attribute * ws^0 * equals * ws^0 * string)^0 *
+ ws^0 * tag_close
+local embed_end_tag = element * tag_close
+
+-- Embedded JavaScript.
+local js = lexer.load('javascript')
+local js_start_rule = #(P('<script') * (P(function(input, index)
+ if input:find('^%s+language%s*=%s*(["\'])[jJ][ava]*[sS]cript%1', index) then
+ return index
+ end
+end) + '>')) * embed_start_tag -- <script language="javascript">
+local js_end_rule = #('</script' * ws^0 * '>') * embed_end_tag -- </script>
+lex:embed(js, js_start_rule, js_end_rule)
+
+-- Embedded VBScript.
+local vbs = lexer.load('vbscript')
+local vbs_start_rule = #(P('<script') * (P(function(input, index)
+ if input:find('^%s+language%s*=%s*(["\'])[vV][bB][sS]cript%1', index) then
+ return index
+ end
+end) + '>')) * embed_start_tag -- <script language="vbscript">
+local vbs_end_rule = #('</script' * ws^0 * '>') * embed_end_tag -- </script>
+lex:embed(vbs, vbs_start_rule, vbs_end_rule)
+
+return lex
diff --git a/lexlua/xml.lua b/lexlua/xml.lua
new file mode 100644
index 000000000..d709ef3e5
--- /dev/null
+++ b/lexlua/xml.lua
@@ -0,0 +1,88 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- XML LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local lex = lexer.new('xml')
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+lex:add_rule('whitespace', ws)
+
+-- Comments and CDATA.
+lex:add_rule('comment', token(lexer.COMMENT, '<!--' * (lexer.any - '-->')^0 *
+ P('-->')^-1))
+lex:add_rule('cdata', token('cdata', '<![CDATA[' * (lexer.any - ']]>')^0 *
+ P(']]>')^-1))
+lex:add_style('cdata', lexer.STYLE_COMMENT)
+
+local alpha = R('az', 'AZ', '\127\255')
+local word_char = lexer.alnum + S('_-:.??')
+local identifier = (alpha + S('_-:.??')) * word_char^0
+
+-- Doctypes and other markup tags.
+lex:add_rule('doctype', token('doctype', P('<!DOCTYPE')) * ws *
+ token('doctype', identifier) * (ws * identifier)^-1 *
+ (1 - P('>'))^0 * token('doctype', '>'))
+lex:add_style('doctype', lexer.STYLE_COMMENT)
+
+-- Processing instructions.
+lex:add_rule('proc_insn', token('proc_insn', P('<?') * (1 - P('?>'))^0 *
+ P('?>')^-1))
+lex:add_style('proc_insn', lexer.STYLE_COMMENT)
+
+-- Elements.
+local namespace = token(lexer.OPERATOR, ':') * token('namespace', identifier)
+lex:add_rule('element', token('element', '<' * P('/')^-1 * identifier) *
+ namespace^-1)
+lex:add_style('element', lexer.STYLE_KEYWORD)
+lex:add_style('namespace', lexer.STYLE_CLASS)
+
+-- Closing tags.
+lex:add_rule('close_tag', token('element', P('/')^-1 * '>'))
+
+-- Attributes.
+lex:add_rule('attribute', token('attribute', identifier) * namespace^-1 *
+ #(lexer.space^0 * '='))
+lex:add_style('attribute', lexer.STYLE_TYPE)
+
+-- TODO: performance is terrible on large files.
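+-- Matches only at positions that fall between an unmatched '<' and the
+-- following '>', consuming no input.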
+local in_tag = P(function(input, index)
+ local before = input:sub(1, index - 1)
+ local s, e = before:find('<[^>]-$'), before:find('>[^<]-$')
+ if s and e then return s > e and index or nil end
+ if s then return index end
+ return input:find('^[^<]->', index) and index or nil
+end)
+
+-- Equals.
+--lex:add_rule('equal', token(lexer.OPERATOR, '=')) -- * in_tag
+
+-- Strings.
+lex:add_rule('string', #S('\'"') * lexer.last_char_includes('=') *
+ token(lexer.STRING,
+ lexer.delimited_range("'", false, true) +
+ lexer.delimited_range('"', false, true)))
+
+-- Numbers.
+lex:add_rule('number', #lexer.digit * lexer.last_char_includes('=') *
+ token(lexer.NUMBER, lexer.digit^1 * P('%')^-1))--*in_tag)
+
+-- Entities.
+lex:add_rule('entity', token('entity', '&' * word_match[[
+ lt gt amp apos quot
+]] * ';'))
+lex:add_style('entity', lexer.STYLE_OPERATOR)
+
+-- Fold Points.
+local function disambiguate_lt(text, pos, line, s)
+ return not line:find('^</', s) and 1 or -1
+end
+lex:add_fold_point('element', '<', disambiguate_lt)
+lex:add_fold_point('element', '/>', -1)
+lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
+lex:add_fold_point('cdata', '<![CDATA[', ']]>')
+
+return lex
diff --git a/lexlua/xtend.lua b/lexlua/xtend.lua
new file mode 100644
index 000000000..452080e45
--- /dev/null
+++ b/lexlua/xtend.lua
@@ -0,0 +1,90 @@
+-- Copyright (c) 2014-2018 Piotr Orzechowski [drzewo.org]. See License.txt.
+-- Xtend LPeg lexer.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lex = lexer.new('xtend')
+
+-- Whitespace.
+local ws = token(lexer.WHITESPACE, lexer.space^1)
+lex:add_rule('whitespace', ws)
+
+-- Classes.
+lex:add_rule('class', token(lexer.KEYWORD, P('class')) * ws^1 *
+ token(lexer.CLASS, lexer.word))
+
+-- Keywords.
+lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
+ -- General.
+ abstract annotation as case catch class create def default dispatch do else
+ enum extends extension final finally for if implements import interface
+ instanceof it new override package private protected public return self static
+ super switch synchronized this throw throws try typeof val var while
+ -- Templates.
+ -- AFTER BEFORE ENDFOR ENDIF FOR IF SEPARATOR
+ -- Literals.
+ true false null
+]]))
+
+-- Types.
+lex:add_rule('type', token(lexer.TYPE, word_match[[
+ boolean byte char double float int long short void
+ Boolean Byte Character Double Float Integer Long Short String
+]]))
+
+-- Functions.
+lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('('))
+
+-- Identifiers.
+lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
+
+-- Templates.
+lex:add_rule('template', token('template', "'''" * (lexer.any - P("'''"))^0 *
+ P("'''")^-1))
+lex:add_style('template', lexer.STYLE_EMBEDDED)
+
+-- Strings.
+lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
+ lexer.delimited_range('"', true)))
+
+-- Comments.
+local line_comment = '//' * lexer.nonnewline_esc^0
+local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
+
+-- Numbers.
+local small_suff = S('lL')
+local med_suff = S('bB') * S('iI')
+local large_suff = S('dD') + S('fF') + S('bB') * S('dD')
+local exp = S('eE') * lexer.digit^1
+
+local dec_inf = ('_' * lexer.digit^1)^0
+local hex_inf = ('_' * lexer.xdigit^1)^0
+local float_pref = lexer.digit^1 * '.' * lexer.digit^1
+local float_suff = exp^-1 * med_suff^-1 * large_suff^-1
+
+local dec = lexer.digit * dec_inf * (small_suff^-1 + float_suff)
+local hex = lexer.hex_num * hex_inf * P('#' * (small_suff + med_suff))^-1
+local float = float_pref * dec_inf * float_suff
+
+lex:add_rule('number', token(lexer.NUMBER, float + hex + dec))
+
+-- Annotations.
+lex:add_rule('annotation', token('annotation', '@' * lexer.word))
+lex:add_style('annotation', lexer.STYLE_PREPROCESSOR)
+
+-- Operators.
+lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}#')))
+
+-- Error.
+lex:add_rule('error', token(lexer.ERROR, lexer.any))
+
+-- Fold points.
+lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, '/*', '*/')
+lex:add_fold_point(lexer.COMMENT, '//', lexer.fold_line_comments('//'))
+lex:add_fold_point(lexer.KEYWORD, 'import', lexer.fold_line_comments('import'))
+
+return lex
diff --git a/lexlua/yaml.lua b/lexlua/yaml.lua
new file mode 100644
index 000000000..abfab8a60
--- /dev/null
+++ b/lexlua/yaml.lua
@@ -0,0 +1,120 @@
+-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
+-- YAML LPeg lexer.
+-- It does not keep track of indentation perfectly.
+
+local lexer = require('lexer')
+local token, word_match = lexer.token, lexer.word_match
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local M = {_NAME = 'yaml'}
+
+-- Whitespace.
+local indent = #lexer.starts_line(S(' \t')) *
+ (token(lexer.WHITESPACE, ' ') + token('indent_error', '\t'))^1
+local ws = token(lexer.WHITESPACE, S(' \t')^1 + lexer.newline^1)
+
+-- Comments.
+local comment = token(lexer.COMMENT, '#' * lexer.nonnewline^0)
+
+-- Strings.
+local string = token(lexer.STRING, lexer.delimited_range("'") +
+ lexer.delimited_range('"'))
+
+-- Numbers.
+local integer = lexer.dec_num + lexer.hex_num + '0' * S('oO') * R('07')^1
+local special_num = '.' * word_match({'inf', 'nan'}, nil, true)
+local number = token(lexer.NUMBER, special_num + lexer.float + integer)
+
+-- Timestamps.
+local ts = token('timestamp',
+ lexer.digit * lexer.digit * lexer.digit * lexer.digit * -- year
+ '-' * lexer.digit * lexer.digit^-1 * -- month
+ '-' * lexer.digit * lexer.digit^-1 * -- day
+ ((S(' \t')^1 + S('tT'))^-1 * -- separator
+ lexer.digit * lexer.digit^-1 * -- hour
+ ':' * lexer.digit * lexer.digit * -- minute
+ ':' * lexer.digit * lexer.digit * -- second
+ ('.' * lexer.digit^0)^-1 * -- fraction
+ ('Z' + -- timezone
+ S(' \t')^0 * S('-+') * lexer.digit * lexer.digit^-1 *
+ (':' * lexer.digit * lexer.digit)^-1)^-1)^-1)
+
+-- Constants.
+local constant = token(lexer.CONSTANT,
+ word_match({'null', 'true', 'false'}, nil, true))
+
+-- Types.
+local type = token(lexer.TYPE, '!!' * word_match({
+ -- Collection types.
+ 'map', 'omap', 'pairs', 'set', 'seq',
+ -- Scalar types.
+ 'binary', 'bool', 'float', 'int', 'merge', 'null', 'str', 'timestamp',
+ 'value', 'yaml'
+}, nil, true) + '!' * lexer.delimited_range('<>'))
+
+-- Document boundaries.
+local doc_bounds = token('document', lexer.starts_line(P('---') + '...'))
+
+-- Directives.
+local directive = token('directive', lexer.starts_line('%') *
+ lexer.nonnewline^1)
+
+local word = (lexer.alpha + '-' * -lexer.space) * (lexer.alnum + '-')^0
+
+-- Keys and literals.
+local colon = S(' \t')^0 * ':' * (lexer.space + -1)
+local key = token(lexer.KEYWORD,
+ #word * (lexer.nonnewline - colon)^1 * #colon *
+ P(function(input, index)
+ local line = input:sub(1, index - 1):match('[^\r\n]+$')
+ return not line:find('[%w-]+:') and index
+ end))
+local value = #word * (lexer.nonnewline - lexer.space^0 * S(',]}'))^1
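+-- Block scalar introduced by '|' or '>': consume the following lines while
+-- they stay indented relative to the start of the block.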
+local block = S('|>') * S('+-')^-1 * (lexer.newline + -1) *
+ function(input, index)
+ local rest = input:sub(index)
+ local level = #rest:match('^( *)')
+ for pos, indent, line in rest:gmatch('() *()([^\r\n]+)') do
+ if indent - pos < level and line ~= ' ' or
+ level == 0 and pos > 1 then
+ return index + pos - 1
+ end
+ end
+ return #input + 1
+ end
+local literal = token('literal', value + block)
+
+-- Indicators.
+local anchor = token(lexer.LABEL, '&' * word)
+local alias = token(lexer.VARIABLE, '*' * word)
+local tag = token('tag', '!' * word * P('!')^-1)
+local reserved = token(lexer.ERROR, S('@`') * word)
+local indicator_chars = token(lexer.OPERATOR, S('-?:,[]{}!'))
+
+M._rules = {
+ {'indent', indent},
+ {'whitespace', ws},
+ {'comment', comment},
+ {'doc_bounds', doc_bounds},
+ {'key', key},
+ {'literal', literal},
+ {'timestamp', ts},
+ {'number', number},
+ {'constant', constant},
+ {'type', type},
+ {'indicator', tag + indicator_chars + alias + anchor + reserved},
+ {'directive', directive},
+}
+
+M._tokenstyles = {
+ indent_error = 'back:%(color.red)',
+ document = lexer.STYLE_CONSTANT,
+ literal = lexer.STYLE_DEFAULT,
+ timestamp = lexer.STYLE_NUMBER,
+ tag = lexer.STYLE_CLASS,
+ directive = lexer.STYLE_PREPROCESSOR,
+}
+
+M._FOLDBYINDENTATION = true
+
+return M
diff --git a/lua/LICENSE b/lua/LICENSE
new file mode 100644
index 000000000..4212dee67
--- /dev/null
+++ b/lua/LICENSE
@@ -0,0 +1,19 @@
+Copyright © 1994–2017 Lua.org, PUC-Rio.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/lua/README b/lua/README
new file mode 100644
index 000000000..1238641b5
--- /dev/null
+++ b/lua/README
@@ -0,0 +1,7 @@
+
+This is Lua 5.3.4, released on 12 Jan 2017.
+
+For further information about Lua, see doc/manual.html.
+
+Scintilla only uses Lua with its LPeg lexer, which is disabled by default.
+In order to enable it, see ../doc/LPegLexer.html.
diff --git a/lua/doc/lua.css b/lua/doc/lua.css
new file mode 100644
index 000000000..5bedf7eb8
--- /dev/null
+++ b/lua/doc/lua.css
@@ -0,0 +1,164 @@
+html {
+ background-color: #F8F8F8 ;
+}
+
+body {
+ background-color: #FFFFFF ;
+ color: #000000 ;
+ font-family: Helvetica, Arial, sans-serif ;
+ text-align: justify ;
+ line-height: 1.25 ;
+ margin: 16px auto ;
+ padding: 32px ;
+ border: solid #a0a0a0 1px ;
+ border-radius: 20px ;
+ max-width: 70em ;
+ width: 90% ;
+}
+
+h1, h2, h3, h4 {
+ color: #000080 ;
+ font-family: Verdana, Geneva, sans-serif ;
+ font-weight: normal ;
+ font-style: normal ;
+ text-align: left ;
+}
+
+h1 {
+ font-size: 28pt ;
+}
+
+h1 img {
+ vertical-align: text-bottom ;
+}
+
+h2:before {
+ content: "\2756" ;
+ padding-right: 0.5em ;
+}
+
+a {
+ text-decoration: none ;
+}
+
+a:link {
+ color: #000080 ;
+}
+
+a:link:hover, a:visited:hover {
+ background-color: #D0D0FF ;
+ color: #000080 ;
+ border-radius: 4px ;
+}
+
+a:link:active, a:visited:active {
+ color: #FF0000 ;
+}
+
+div.menubar {
+ padding-bottom: 0.5em ;
+}
+
+p.menubar {
+ margin-left: 2.5em ;
+}
+
+.menubar a:hover {
+ margin: -3px -3px -3px -3px ;
+ padding: 3px 3px 3px 3px ;
+ border-radius: 4px ;
+}
+
+:target {
+ background-color: #F0F0F0 ;
+ margin: -8px ;
+ padding: 8px ;
+ border-radius: 8px ;
+ outline: none ;
+}
+
+hr {
+ display: none ;
+}
+
+table hr {
+ background-color: #a0a0a0 ;
+ color: #a0a0a0 ;
+ border: 0 ;
+ height: 1px ;
+ display: block ;
+}
+
+.footer {
+ color: gray ;
+ font-size: x-small ;
+ text-transform: lowercase ;
+}
+
+input[type=text] {
+ border: solid #a0a0a0 2px ;
+ border-radius: 2em ;
+ background-image: url('images/search.png') ;
+ background-repeat: no-repeat ;
+ background-position: 4px center ;
+ padding-left: 20px ;
+ height: 2em ;
+}
+
+pre.session {
+ background-color: #F8F8F8 ;
+ padding: 1em ;
+ border-radius: 8px ;
+}
+
+td.gutter {
+ width: 4% ;
+}
+
+table.columns {
+ border: none ;
+ border-spacing: 0 ;
+ border-collapse: collapse ;
+}
+
+table.columns td {
+ vertical-align: top ;
+ padding: 0 ;
+ padding-bottom: 1em ;
+ text-align: justify ;
+ line-height: 1.25 ;
+}
+
+p.logos a:link:hover, p.logos a:visited:hover {
+ background-color: inherit ;
+}
+
+table.book {
+ border: none ;
+ border-spacing: 0 ;
+ border-collapse: collapse ;
+}
+
+table.book td {
+ padding: 0 ;
+ vertical-align: top ;
+}
+
+table.book td.cover {
+ padding-right: 1em ;
+}
+
+table.book img {
+ border: solid #000080 1px ;
+}
+
+table.book span {
+ font-size: small ;
+ text-align: left ;
+ display: block ;
+ margin-top: 0.25em ;
+}
+
+img {
+ background-color: white ;
+}
diff --git a/lua/doc/manual.css b/lua/doc/manual.css
new file mode 100644
index 000000000..aa0e677dd
--- /dev/null
+++ b/lua/doc/manual.css
@@ -0,0 +1,21 @@
+h3 code {
+ font-family: inherit ;
+ font-size: inherit ;
+}
+
+pre, code {
+ font-size: 12pt ;
+}
+
+span.apii {
+ color: gray ;
+ float: right ;
+ font-family: inherit ;
+ font-style: normal ;
+ font-size: small ;
+}
+
+h2:before {
+ content: "" ;
+ padding-right: 0em ;
+}
diff --git a/lua/doc/manual.html b/lua/doc/manual.html
new file mode 100644
index 000000000..3126b5d6a
--- /dev/null
+++ b/lua/doc/manual.html
@@ -0,0 +1,10985 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<HTML>
+<HEAD>
+<TITLE>Lua 5.3 Reference Manual</TITLE>
+<LINK REL="stylesheet" TYPE="text/css" HREF="lua.css">
+<LINK REL="stylesheet" TYPE="text/css" HREF="manual.css">
+<META HTTP-EQUIV="content-type" CONTENT="text/html; charset=iso-8859-1">
+</HEAD>
+
+<BODY>
+
+<H1>
+<A HREF="http://www.lua.org/"><IMG SRC="logo.gif" ALT="Lua"></A>
+Lua 5.3 Reference Manual
+</H1>
+
+<P>
+by Roberto Ierusalimschy, Luiz Henrique de Figueiredo, Waldemar Celes
+
+<P>
+<SMALL>
+Copyright &copy; 2015&ndash;2017 Lua.org, PUC-Rio.
+Freely available under the terms of the
+<a href="http://www.lua.org/license.html">Lua license</a>.
+</SMALL>
+
+<DIV CLASS="menubar">
+<A HREF="contents.html#contents">contents</A>
+&middot;
+<A HREF="contents.html#index">index</A>
+&middot;
+<A HREF="http://www.lua.org/manual/">other versions</A>
+</DIV>
+
+<!-- ====================================================================== -->
+<p>
+
+<!-- $Id: manual.of,v 1.167 2017/01/09 15:18:11 roberto Exp $ -->
+
+
+
+
+<h1>1 &ndash; <a name="1">Introduction</a></h1>
+
+<p>
+Lua is a powerful, efficient, lightweight, embeddable scripting language.
+It supports procedural programming,
+object-oriented programming, functional programming,
+data-driven programming, and data description.
+
+
+<p>
+Lua combines simple procedural syntax with powerful data description
+constructs based on associative arrays and extensible semantics.
+Lua is dynamically typed,
+runs by interpreting bytecode with a register-based
+virtual machine,
+and has automatic memory management with
+incremental garbage collection,
+making it ideal for configuration, scripting,
+and rapid prototyping.
+
+
+<p>
+Lua is implemented as a library, written in <em>clean C</em>,
+the common subset of Standard&nbsp;C and C++.
+The Lua distribution includes a host program called <code>lua</code>,
+which uses the Lua library to offer a complete,
+standalone Lua interpreter,
+for interactive or batch use.
+Lua is intended to be used both as a powerful, lightweight,
+embeddable scripting language for any program that needs one,
+and as a powerful but lightweight and efficient stand-alone language.
+
+
+<p>
+As an extension language, Lua has no notion of a "main" program:
+it works <em>embedded</em> in a host client,
+called the <em>embedding program</em> or simply the <em>host</em>.
+(Frequently, this host is the stand-alone <code>lua</code> program.)
+The host program can invoke functions to execute a piece of Lua code,
+can write and read Lua variables,
+and can register C&nbsp;functions to be called by Lua code.
+Through the use of C&nbsp;functions, Lua can be augmented to cope with
+a wide range of different domains,
+thus creating customized programming languages sharing a syntactical framework.
+
+
+<p>
+Lua is free software,
+and is provided as usual with no guarantees,
+as stated in its license.
+The implementation described in this manual is available
+at Lua's official web site, <code>www.lua.org</code>.
+
+
+<p>
+Like any other reference manual,
+this document is dry in places.
+For a discussion of the decisions behind the design of Lua,
+see the technical papers available at Lua's web site.
+For a detailed introduction to programming in Lua,
+see Roberto's book, <em>Programming in Lua</em>.
+
+
+
+<h1>2 &ndash; <a name="2">Basic Concepts</a></h1>
+
+<p>
+This section describes the basic concepts of the language.
+
+
+
+<h2>2.1 &ndash; <a name="2.1">Values and Types</a></h2>
+
+<p>
+Lua is a <em>dynamically typed language</em>.
+This means that
+variables do not have types; only values do.
+There are no type definitions in the language.
+All values carry their own type.
+
+
+<p>
+All values in Lua are <em>first-class values</em>.
+This means that all values can be stored in variables,
+passed as arguments to other functions, and returned as results.
+
+
+<p>
+There are eight basic types in Lua:
+<em>nil</em>, <em>boolean</em>, <em>number</em>,
+<em>string</em>, <em>function</em>, <em>userdata</em>,
+<em>thread</em>, and <em>table</em>.
+The type <em>nil</em> has one single value, <b>nil</b>,
+whose main property is to be different from any other value;
+it usually represents the absence of a useful value.
+The type <em>boolean</em> has two values, <b>false</b> and <b>true</b>.
+Both <b>nil</b> and <b>false</b> make a condition false;
+any other value makes it true.
+The type <em>number</em> represents both
+integer numbers and real (floating-point) numbers.
+The type <em>string</em> represents immutable sequences of bytes.
+
+Lua is 8-bit clean:
+strings can contain any 8-bit value,
+including embedded zeros ('<code>\0</code>').
+Lua is also encoding-agnostic;
+it makes no assumptions about the contents of a string.
+
+
+<p>
+The type <em>number</em> uses two internal representations,
+or two subtypes,
+one called <em>integer</em> and the other called <em>float</em>.
+Lua has explicit rules about when each representation is used,
+but it also converts between them automatically as needed (see <a href="#3.4.3">&sect;3.4.3</a>).
+Therefore,
+the programmer may choose to mostly ignore the difference
+between integers and floats
+or to assume complete control over the representation of each number.
+Standard Lua uses 64-bit integers and double-precision (64-bit) floats,
+but you can also compile Lua so that it
+uses 32-bit integers and/or single-precision (32-bit) floats.
+The option with 32 bits for both integers and floats
+is particularly attractive
+for small machines and embedded systems.
+(See macro <code>LUA_32BITS</code> in file <code>luaconf.h</code>.)
+
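+<p>
+For illustration (this example is not part of the normative text),
+the two subtypes can be inspected with the standard function
+<a href="#pdf-math.type"><code>math.type</code></a>:
+
+<pre>
+     print(math.type(3), math.type(3.0))   --&gt; integer  float
+     print(3 == 3.0)                       --&gt; true
+     print(3 / 2, 3 // 2)                  --&gt; 1.5      1
+</pre>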
+
+<p>
+Lua can call (and manipulate) functions written in Lua and
+functions written in C (see <a href="#3.4.10">&sect;3.4.10</a>).
+Both are represented by the type <em>function</em>.
+
+
+<p>
+The type <em>userdata</em> is provided to allow arbitrary C&nbsp;data to
+be stored in Lua variables.
+A userdata value represents a block of raw memory.
+There are two kinds of userdata:
+<em>full userdata</em>,
+which is an object with a block of memory managed by Lua,
+and <em>light userdata</em>,
+which is simply a C&nbsp;pointer value.
+Userdata has no predefined operations in Lua,
+except assignment and identity test.
+By using <em>metatables</em>,
+the programmer can define operations for full userdata values
+(see <a href="#2.4">&sect;2.4</a>).
+Userdata values cannot be created or modified in Lua,
+only through the C&nbsp;API.
+This guarantees the integrity of data owned by the host program.
+
+
+<p>
+The type <em>thread</em> represents independent threads of execution
+and it is used to implement coroutines (see <a href="#2.6">&sect;2.6</a>).
+Lua threads are not related to operating-system threads.
+Lua supports coroutines on all systems,
+even those that do not support threads natively.
+
+
+<p>
+The type <em>table</em> implements associative arrays,
+that is, arrays that can be indexed not only with numbers,
+but with any Lua value except <b>nil</b> and NaN.
+(<em>Not a Number</em> is a special value used to represent
+undefined or unrepresentable numerical results, such as <code>0/0</code>.)
+Tables can be <em>heterogeneous</em>;
+that is, they can contain values of all types (except <b>nil</b>).
+Any key with value <b>nil</b> is not considered part of the table.
+Conversely, any key that is not part of a table has
+an associated value <b>nil</b>.
+
+
+<p>
+Tables are the sole data-structuring mechanism in Lua;
+they can be used to represent ordinary arrays, lists,
+symbol tables, sets, records, graphs, trees, etc.
+To represent records, Lua uses the field name as an index.
+The language supports this representation by
+providing <code>a.name</code> as syntactic sugar for <code>a["name"]</code>.
+There are several convenient ways to create tables in Lua
+(see <a href="#3.4.9">&sect;3.4.9</a>).
+
+
+<p>
+Like indices,
+the values of table fields can be of any type.
+In particular,
+because functions are first-class values,
+table fields can contain functions.
+Thus tables can also carry <em>methods</em> (see <a href="#3.4.11">&sect;3.4.11</a>).
+
+
+<p>
+The indexing of tables follows
+the definition of raw equality in the language.
+The expressions <code>a[i]</code> and <code>a[j]</code>
+denote the same table element
+if and only if <code>i</code> and <code>j</code> are raw equal
+(that is, equal without metamethods).
+In particular, floats with integral values
+are equal to their respective integers
+(e.g., <code>1.0 == 1</code>).
+To avoid ambiguities,
+any float with integral value used as a key
+is converted to its respective integer.
+For instance, if you write <code>a[2.0] = true</code>,
+the actual key inserted into the table will be the
+integer <code>2</code>.
+(On the other hand,
+2 and "<code>2</code>" are different Lua values and therefore
+denote different table entries.)
+
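+<p>
+As an illustrative sketch of this normalization
+(using only the standard <code>print</code> function):
+
+<pre>
+     local a = {}
+     a[2.0] = "two"           -- the float key 2.0 becomes the integer 2
+     print(a[2], a[2.0])      --&gt; two   two
+     a["2"] = "string two"    -- "2" is a string, hence a different key
+     print(a[2], a["2"])      --&gt; two   string two
+</pre>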
+
+<p>
+Tables, functions, threads, and (full) userdata values are <em>objects</em>:
+variables do not actually <em>contain</em> these values,
+only <em>references</em> to them.
+Assignment, parameter passing, and function returns
+always manipulate references to such values;
+these operations do not imply any kind of copy.
+
+
+<p>
+The library function <a href="#pdf-type"><code>type</code></a> returns a string describing the type
+of a given value (see <a href="#6.1">&sect;6.1</a>).
+
+
+
+
+
+<h2>2.2 &ndash; <a name="2.2">Environments and the Global Environment</a></h2>
+
+<p>
+As will be discussed in <a href="#3.2">&sect;3.2</a> and <a href="#3.3.3">&sect;3.3.3</a>,
+any reference to a free name
+(that is, a name not bound to any declaration) <code>var</code>
+is syntactically translated to <code>_ENV.var</code>.
+Moreover, every chunk is compiled in the scope of
+an external local variable named <code>_ENV</code> (see <a href="#3.3.2">&sect;3.3.2</a>),
+so <code>_ENV</code> itself is never a free name in a chunk.
+
+
+<p>
+Despite the existence of this external <code>_ENV</code> variable and
+the translation of free names,
+<code>_ENV</code> is a completely regular name.
+In particular,
+you can define new variables and parameters with that name.
+Each reference to a free name uses the <code>_ENV</code> that is
+visible at that point in the program,
+following the usual visibility rules of Lua (see <a href="#3.5">&sect;3.5</a>).
+
+
+<p>
+Any table used as the value of <code>_ENV</code> is called an <em>environment</em>.
+
+
+<p>
+Lua keeps a distinguished environment called the <em>global environment</em>.
+This value is kept at a special index in the C registry (see <a href="#4.5">&sect;4.5</a>).
+In Lua, the global variable <a href="#pdf-_G"><code>_G</code></a> is initialized with this same value.
+(<a href="#pdf-_G"><code>_G</code></a> is never used internally.)
+
+
+<p>
+When Lua loads a chunk,
+the default value for its <code>_ENV</code> upvalue
+is the global environment (see <a href="#pdf-load"><code>load</code></a>).
+Therefore, by default,
+free names in Lua code refer to entries in the global environment
+(and, therefore, they are also called <em>global variables</em>).
+Moreover, all standard libraries are loaded in the global environment
+and some functions there operate on that environment.
+You can use <a href="#pdf-load"><code>load</code></a> (or <a href="#pdf-loadfile"><code>loadfile</code></a>)
+to load a chunk with a different environment.
+(In C, you have to load the chunk and then change the value
+of its first upvalue.)
+
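+<p>
+A small illustrative sketch of loading a chunk with its own environment,
+using the standard <a href="#pdf-load"><code>load</code></a> function:
+
+<pre>
+     x = "global"                         -- same as _ENV.x = "global"
+     local env = {print = print}          -- a fresh environment
+     local chunk = load('print(x); x = "inner"', "chunk", "t", env)
+     chunk()                              --&gt; nil   (env has no field 'x')
+     print(x, env.x)                      --&gt; global   inner
+</pre>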
+
+
+
+
+<h2>2.3 &ndash; <a name="2.3">Error Handling</a></h2>
+
+<p>
+Because Lua is an embedded extension language,
+all Lua actions start from C&nbsp;code in the host program
+calling a function from the Lua library.
+(When you use Lua standalone,
+the <code>lua</code> application is the host program.)
+Whenever an error occurs during
+the compilation or execution of a Lua chunk,
+control returns to the host,
+which can take appropriate measures
+(such as printing an error message).
+
+
+<p>
+Lua code can explicitly generate an error by calling the
+<a href="#pdf-error"><code>error</code></a> function.
+If you need to catch errors in Lua,
+you can use <a href="#pdf-pcall"><code>pcall</code></a> or <a href="#pdf-xpcall"><code>xpcall</code></a>
+to call a given function in <em>protected mode</em>.
+
+
+<p>
+Whenever there is an error,
+an <em>error object</em> (also called an <em>error message</em>)
+is propagated with information about the error.
+Lua itself only generates errors whose error object is a string,
+but programs may generate errors with
+any value as the error object.
+It is up to the Lua program or its host to handle such error objects.
+
+
+<p>
+When you use <a href="#pdf-xpcall"><code>xpcall</code></a> or <a href="#lua_pcall"><code>lua_pcall</code></a>,
+you may give a <em>message handler</em>
+to be called in case of errors.
+This function is called with the original error object
+and returns a new error object.
+It is called before the error unwinds the stack,
+so that it can gather more information about the error,
+for instance by inspecting the stack and creating a stack traceback.
+This message handler is still protected by the protected call;
+so, an error inside the message handler
+will call the message handler again.
+If this loop goes on for too long,
+Lua breaks it and returns an appropriate message.
+(The message handler is called only for regular runtime errors.
+It is not called for memory-allocation errors
+nor for errors while running finalizers.)
+
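+<p>
+By way of illustration, a protected call and a protected call with a
+message handler (here the standard
+<a href="#pdf-debug.traceback"><code>debug.traceback</code></a>):
+
+<pre>
+     local ok, err = pcall(function () error("boom") end)
+     print(ok)        --&gt; false
+     print(err)       -- a string ending in "boom", with position information
+
+     local ok, msg = xpcall(function () error("boom") end, debug.traceback)
+     print(msg)       -- the original message followed by a stack traceback
+</pre>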
+
+
+
+
+<h2>2.4 &ndash; <a name="2.4">Metatables and Metamethods</a></h2>
+
+<p>
+Every value in Lua can have a <em>metatable</em>.
+This <em>metatable</em> is an ordinary Lua table
+that defines the behavior of the original value
+under certain special operations.
+You can change several aspects of the behavior
+of operations over a value by setting specific fields in its metatable.
+For instance, when a non-numeric value is the operand of an addition,
+Lua checks for a function in the field "<code>__add</code>" of the value's metatable.
+If it finds one,
+Lua calls this function to perform the addition.
+
+
+<p>
+The key for each event in a metatable is a string
+with the event name prefixed by two underscores;
+the corresponding values are called <em>metamethods</em>.
+In the previous example, the key is "<code>__add</code>"
+and the metamethod is the function that performs the addition.
+
+
+<p>
+You can query the metatable of any value
+using the <a href="#pdf-getmetatable"><code>getmetatable</code></a> function.
+Lua queries metamethods in metatables using a raw access (see <a href="#pdf-rawget"><code>rawget</code></a>).
+So, to retrieve the metamethod for event <code>ev</code> in object <code>o</code>,
+Lua does the equivalent to the following code:
+
+<pre>
+ rawget(getmetatable(<em>o</em>) or {}, "__<em>ev</em>")
+</pre>
+
+<p>
+You can replace the metatable of tables
+using the <a href="#pdf-setmetatable"><code>setmetatable</code></a> function.
+You cannot change the metatable of other types from Lua code
+(except by using the debug library (<a href="#6.10">&sect;6.10</a>));
+you should use the C&nbsp;API for that.
+
+
+<p>
+Tables and full userdata have individual metatables
+(although multiple tables and userdata can share their metatables).
+Values of all other types share one single metatable per type;
+that is, there is one single metatable for all numbers,
+one for all strings, etc.
+By default, a value has no metatable,
+but the string library sets a metatable for the string type (see <a href="#6.4">&sect;6.4</a>).
+
+
+<p>
+A metatable controls how an object behaves in
+arithmetic operations, bitwise operations,
+order comparisons, concatenation, length operation, calls, and indexing.
+A metatable also can define a function to be called
+when a userdata or a table is garbage collected (<a href="#2.5">&sect;2.5</a>).
+
+
+<p>
+For the unary operators (negation, length, and bitwise NOT),
+the metamethod is computed and called with a dummy second operand,
+equal to the first one.
+This extra operand is only to simplify Lua's internals
+(by making these operators behave like a binary operation)
+and may be removed in future versions.
+(For most uses this extra operand is irrelevant.)
+
+
+<p>
+A detailed list of events controlled by metatables is given next.
+Each operation is identified by its corresponding key.
+
+
+
+<ul>
+
+<li><b><code>__add</code>: </b>
+the addition (<code>+</code>) operation.
+If any operand for an addition is not a number
+(nor a string coercible to a number),
+Lua will try to call a metamethod.
+First, Lua will check the first operand (even if it is valid).
+If that operand does not define a metamethod for <code>__add</code>,
+then Lua will check the second operand.
+If Lua can find a metamethod,
+it calls the metamethod with the two operands as arguments,
+and the result of the call
+(adjusted to one value)
+is the result of the operation.
+Otherwise,
+it raises an error.
+</li>
+
+<li><b><code>__sub</code>: </b>
+the subtraction (<code>-</code>) operation.
+Behavior similar to the addition operation.
+</li>
+
+<li><b><code>__mul</code>: </b>
+the multiplication (<code>*</code>) operation.
+Behavior similar to the addition operation.
+</li>
+
+<li><b><code>__div</code>: </b>
+the division (<code>/</code>) operation.
+Behavior similar to the addition operation.
+</li>
+
+<li><b><code>__mod</code>: </b>
+the modulo (<code>%</code>) operation.
+Behavior similar to the addition operation.
+</li>
+
+<li><b><code>__pow</code>: </b>
+the exponentiation (<code>^</code>) operation.
+Behavior similar to the addition operation.
+</li>
+
+<li><b><code>__unm</code>: </b>
+the negation (unary <code>-</code>) operation.
+Behavior similar to the addition operation.
+</li>
+
+<li><b><code>__idiv</code>: </b>
+the floor division (<code>//</code>) operation.
+Behavior similar to the addition operation.
+</li>
+
+<li><b><code>__band</code>: </b>
+the bitwise AND (<code>&amp;</code>) operation.
+Behavior similar to the addition operation,
+except that Lua will try a metamethod
+if any operand is neither an integer
+nor a value coercible to an integer (see <a href="#3.4.3">&sect;3.4.3</a>).
+</li>
+
+<li><b><code>__bor</code>: </b>
+the bitwise OR (<code>|</code>) operation.
+Behavior similar to the bitwise AND operation.
+</li>
+
+<li><b><code>__bxor</code>: </b>
+the bitwise exclusive OR (binary <code>~</code>) operation.
+Behavior similar to the bitwise AND operation.
+</li>
+
+<li><b><code>__bnot</code>: </b>
+the bitwise NOT (unary <code>~</code>) operation.
+Behavior similar to the bitwise AND operation.
+</li>
+
+<li><b><code>__shl</code>: </b>
+the bitwise left shift (<code>&lt;&lt;</code>) operation.
+Behavior similar to the bitwise AND operation.
+</li>
+
+<li><b><code>__shr</code>: </b>
+the bitwise right shift (<code>&gt;&gt;</code>) operation.
+Behavior similar to the bitwise AND operation.
+</li>
+
+<li><b><code>__concat</code>: </b>
+the concatenation (<code>..</code>) operation.
+Behavior similar to the addition operation,
+except that Lua will try a metamethod
+if any operand is neither a string nor a number
+(which is always coercible to a string).
+</li>
+
+<li><b><code>__len</code>: </b>
+the length (<code>#</code>) operation.
+If the object is not a string,
+Lua will try its metamethod.
+If there is a metamethod,
+Lua calls it with the object as argument,
+and the result of the call
+(always adjusted to one value)
+is the result of the operation.
+If there is no metamethod but the object is a table,
+then Lua uses the table length operation (see <a href="#3.4.7">&sect;3.4.7</a>).
+Otherwise, Lua raises an error.
+</li>
+
+<li><b><code>__eq</code>: </b>
+the equal (<code>==</code>) operation.
+Behavior similar to the addition operation,
+except that Lua will try a metamethod only when the values
+being compared are either both tables or both full userdata
+and they are not primitively equal.
+The result of the call is always converted to a boolean.
+</li>
+
+<li><b><code>__lt</code>: </b>
+the less than (<code>&lt;</code>) operation.
+Behavior similar to the addition operation,
+except that Lua will try a metamethod only when the values
+being compared are neither both numbers nor both strings.
+The result of the call is always converted to a boolean.
+</li>
+
+<li><b><code>__le</code>: </b>
+the less equal (<code>&lt;=</code>) operation.
+Unlike other operations,
+the less-equal operation can use two different events.
+First, Lua looks for the <code>__le</code> metamethod in both operands,
+like in the less than operation.
+If it cannot find such a metamethod,
+then it will try the <code>__lt</code> metamethod,
+assuming that <code>a &lt;= b</code> is equivalent to <code>not (b &lt; a)</code>.
+As with the other comparison operators,
+the result is always a boolean.
+(This use of the <code>__lt</code> event can be removed in future versions;
+it is also slower than a real <code>__le</code> metamethod.)
+</li>
+
+<li><b><code>__index</code>: </b>
+The indexing access <code>table[key]</code>.
+This event happens when <code>table</code> is not a table or
+when <code>key</code> is not present in <code>table</code>.
+The metamethod is looked up in <code>table</code>.
+
+
+<p>
+Despite the name,
+the metamethod for this event can be either a function or a table.
+If it is a function,
+it is called with <code>table</code> and <code>key</code> as arguments,
+and the result of the call
+(adjusted to one value)
+is the result of the operation.
+If it is a table,
+the final result is the result of indexing this table with <code>key</code>.
+(This indexing is regular, not raw,
+and therefore can trigger another metamethod.)
+</li>
+
+<li><b><code>__newindex</code>: </b>
+The indexing assignment <code>table[key] = value</code>.
+Like the index event,
+this event happens when <code>table</code> is not a table or
+when <code>key</code> is not present in <code>table</code>.
+The metamethod is looked up in <code>table</code>.
+
+
+<p>
+Like with indexing,
+the metamethod for this event can be either a function or a table.
+If it is a function,
+it is called with <code>table</code>, <code>key</code>, and <code>value</code> as arguments.
+If it is a table,
+Lua does an indexing assignment to this table with the same key and value.
+(This assignment is regular, not raw,
+and therefore can trigger another metamethod.)
+
+
+<p>
+Whenever there is a <code>__newindex</code> metamethod,
+Lua does not perform the primitive assignment.
+(If necessary,
+the metamethod itself can call <a href="#pdf-rawset"><code>rawset</code></a>
+to do the assignment.)
+</li>
+
+<li><b><code>__call</code>: </b>
+The call operation <code>func(args)</code>.
+This event happens when Lua tries to call a non-function value
+(that is, <code>func</code> is not a function).
+The metamethod is looked up in <code>func</code>.
+If present,
+the metamethod is called with <code>func</code> as its first argument,
+followed by the arguments of the original call (<code>args</code>).
+All results of the call
+are the result of the operation.
+(This is the only metamethod that allows multiple results.)
+</li>
+
+</ul>
+
+<p>
+It is a good practice to add all needed metamethods to a table
+before setting it as a metatable of some object.
+In particular, the <code>__gc</code> metamethod works only when this order
+is followed (see <a href="#2.5.1">&sect;2.5.1</a>).
+
+
+<p>
+Because metatables are regular tables,
+they can contain arbitrary fields,
+not only the event names defined above.
+Some functions in the standard library
+(e.g., <a href="#pdf-tostring"><code>tostring</code></a>)
+use other fields in metatables for their own purposes.
+
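+<p>
+As an illustrative sketch, a metatable that defines both an arithmetic
+metamethod and an <code>__index</code> metamethod
+(the name <code>Vector</code> is ad hoc, not a library table):
+
+<pre>
+     local Vector = {}
+     Vector.__index = Vector
+     Vector.__add = function (a, b)
+       return setmetatable({x = a.x + b.x}, Vector)
+     end
+
+     function Vector.new (x) return setmetatable({x = x}, Vector) end
+     function Vector.value (v) return v.x end
+
+     local v = Vector.new(1) + Vector.new(2)   -- uses __add
+     print(v:value())                          --&gt; 3   (found through __index)
+</pre>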
+
+
+
+
+<h2>2.5 &ndash; <a name="2.5">Garbage Collection</a></h2>
+
+<p>
+Lua performs automatic memory management.
+This means that
+you do not have to worry about allocating memory for new objects
+or freeing it when the objects are no longer needed.
+Lua manages memory automatically by running
+a <em>garbage collector</em> to collect all <em>dead objects</em>
+(that is, objects that are no longer accessible from Lua).
+All memory used by Lua is subject to automatic management:
+strings, tables, userdata, functions, threads, internal structures, etc.
+
+
+<p>
+Lua implements an incremental mark-and-sweep collector.
+It uses two numbers to control its garbage-collection cycles:
+the <em>garbage-collector pause</em> and
+the <em>garbage-collector step multiplier</em>.
+Both use percentage points as units
+(e.g., a value of 100 means an internal value of 1).
+
+
+<p>
+The garbage-collector pause
+controls how long the collector waits before starting a new cycle.
+Larger values make the collector less aggressive.
+Values smaller than 100 mean the collector will not wait to
+start a new cycle.
+A value of 200 means that the collector waits for the total memory in use
+to double before starting a new cycle.
+
+
+<p>
+The garbage-collector step multiplier
+controls the speed of the collector
+relative to memory allocation.
+Larger values make the collector more aggressive but also increase
+the size of each incremental step.
+You should not use values smaller than 100,
+because they make the collector too slow and
+can result in the collector never finishing a cycle.
+The default is 200,
+which means that the collector runs at "twice"
+the speed of memory allocation.
+
+
+<p>
+If you set the step multiplier to a very large number
+(larger than 10% of the maximum number of
+bytes that the program may use),
+the collector behaves like a stop-the-world collector.
+If you then set the pause to 200,
+the collector behaves as in old Lua versions,
+doing a complete collection every time Lua doubles its
+memory usage.
+
+
+<p>
+You can change these numbers by calling <a href="#lua_gc"><code>lua_gc</code></a> in C
+or <a href="#pdf-collectgarbage"><code>collectgarbage</code></a> in Lua.
+You can also use these functions to control
+the collector directly (e.g., stop and restart it).
+
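+<p>
+For illustration, the corresponding calls from Lua, all of them options of
+<a href="#pdf-collectgarbage"><code>collectgarbage</code></a>:
+
+<pre>
+     collectgarbage("setpause", 200)     -- start a cycle when memory doubles
+     collectgarbage("setstepmul", 200)   -- the default step multiplier
+     print(collectgarbage("count"))      -- memory in use, in Kbytes
+     collectgarbage("collect")           -- force a complete collection cycle
+</pre>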
+
+
+<h3>2.5.1 &ndash; <a name="2.5.1">Garbage-Collection Metamethods</a></h3>
+
+<p>
+You can set garbage-collector metamethods for tables
+and, using the C&nbsp;API,
+for full userdata (see <a href="#2.4">&sect;2.4</a>).
+These metamethods are also called <em>finalizers</em>.
+Finalizers allow you to coordinate Lua's garbage collection
+with external resource management
+(such as closing files, network or database connections,
+or freeing your own memory).
+
+
+<p>
+For an object (table or userdata) to be finalized when collected,
+you must <em>mark</em> it for finalization.
+
+You mark an object for finalization when you set its metatable
+and the metatable has a field indexed by the string "<code>__gc</code>".
+Note that if you set a metatable without a <code>__gc</code> field
+and later create that field in the metatable,
+the object will not be marked for finalization.
+
+
+<p>
+When a marked object becomes garbage,
+it is not collected immediately by the garbage collector.
+Instead, Lua puts it in a list.
+After the collection,
+Lua goes through that list.
+For each object in the list,
+it checks the object's <code>__gc</code> metamethod:
+If it is a function,
+Lua calls it with the object as its single argument;
+if the metamethod is not a function,
+Lua simply ignores it.
+
+
+<p>
+At the end of each garbage-collection cycle,
+the finalizers for objects are called in
+the reverse order that the objects were marked for finalization,
+among those collected in that cycle;
+that is, the first finalizer to be called is the one associated
+with the object marked last in the program.
+The execution of each finalizer may occur at any point during
+the execution of the regular code.
+
+
+<p>
+Because the object being collected must still be used by the finalizer,
+that object (and other objects accessible only through it)
+must be <em>resurrected</em> by Lua.
+Usually, this resurrection is transient,
+and the object memory is freed in the next garbage-collection cycle.
+However, if the finalizer stores the object in some global place
+(e.g., a global variable),
+then the resurrection is permanent.
+Moreover, if the finalizer marks a finalizing object for finalization again,
+its finalizer will be called again in the next cycle where the
+object is unreachable.
+In any case,
+the object memory is freed only in a GC cycle where
+the object is unreachable and not marked for finalization.
+
+
+<p>
+When you close a state (see <a href="#lua_close"><code>lua_close</code></a>),
+Lua calls the finalizers of all objects marked for finalization,
+following the reverse order that they were marked.
+If any finalizer marks objects for collection during that phase,
+these marks have no effect.
+
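+<p>
+A minimal illustrative example of a finalizer
+(the <code>__gc</code> field is already present when the metatable is set):
+
+<pre>
+     local t = setmetatable({}, {__gc = function () print("finalized") end})
+     t = nil                -- the table becomes garbage
+     collectgarbage()       --&gt; finalized
+</pre>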
+
+
+
+
+<h3>2.5.2 &ndash; <a name="2.5.2">Weak Tables</a></h3>
+
+<p>
+A <em>weak table</em> is a table whose elements are
+<em>weak references</em>.
+A weak reference is ignored by the garbage collector.
+In other words,
+if the only references to an object are weak references,
+then the garbage collector will collect that object.
+
+
+<p>
+A weak table can have weak keys, weak values, or both.
+A table with weak values allows the collection of its values,
+but prevents the collection of its keys.
+A table with both weak keys and weak values allows the collection of
+both keys and values.
+In any case, if either the key or the value is collected,
+the whole pair is removed from the table.
+The weakness of a table is controlled by the
+<code>__mode</code> field of its metatable.
+If the <code>__mode</code> field is a string containing the character&nbsp;'<code>k</code>',
+the keys in the table are weak.
+If <code>__mode</code> contains '<code>v</code>',
+the values in the table are weak.
+
+
+<p>
+A table with weak keys and strong values
+is also called an <em>ephemeron table</em>.
+In an ephemeron table,
+a value is considered reachable only if its key is reachable.
+In particular,
+if the only reference to a key comes through its value,
+the pair is removed.
+
+
+<p>
+Any change in the weakness of a table may take effect only
+at the next collection cycle.
+In particular, if you change the weakness to a stronger mode,
+Lua may still collect some items from that table
+before the change takes effect.
+
+
+<p>
+Only objects that have an explicit construction
+are removed from weak tables.
+Values, such as numbers and light C&nbsp;functions,
+are not subject to garbage collection,
+and therefore are not removed from weak tables
+(unless their associated values are collected).
+Although strings are subject to garbage collection,
+they do not have an explicit construction,
+and therefore are not removed from weak tables.
+
+
+<p>
+Resurrected objects
+(that is, objects being finalized
+and objects accessible only through objects being finalized)
+have a special behavior in weak tables.
+They are removed from weak values before running their finalizers,
+but are removed from weak keys only in the next collection
+after running their finalizers, when such objects are actually freed.
+This behavior allows the finalizer to access properties
+associated with the object through weak tables.
+
+
+<p>
+If a weak table is among the resurrected objects in a collection cycle,
+it may not be properly cleared until the next cycle.
+
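+<p>
+For illustration, a table with weak keys used as a cache:
+
+<pre>
+     local cache = setmetatable({}, {__mode = "k"})   -- weak keys
+     local key = {}
+     cache[key] = "associated data"
+     print(cache[key])      --&gt; associated data
+     key = nil              -- no strong reference to the key remains
+     collectgarbage()       -- the pair is removed during this collection
+     print(next(cache))     --&gt; nil
+</pre>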
+
+
+
+
+
+
+<h2>2.6 &ndash; <a name="2.6">Coroutines</a></h2>
+
+<p>
+Lua supports coroutines,
+also called <em>collaborative multithreading</em>.
+A coroutine in Lua represents an independent thread of execution.
+Unlike threads in multithread systems, however,
+a coroutine only suspends its execution by explicitly calling
+a yield function.
+
+
+<p>
+You create a coroutine by calling <a href="#pdf-coroutine.create"><code>coroutine.create</code></a>.
+Its sole argument is a function
+that is the main function of the coroutine.
+The <code>create</code> function only creates a new coroutine and
+returns a handle to it (an object of type <em>thread</em>);
+it does not start the coroutine.
+
+
+<p>
+You execute a coroutine by calling <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a>.
+When you first call <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a>,
+passing as its first argument
+a thread returned by <a href="#pdf-coroutine.create"><code>coroutine.create</code></a>,
+the coroutine starts its execution by
+calling its main function.
+Extra arguments passed to <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a> are passed
+as arguments to that function.
+After the coroutine starts running,
+it runs until it terminates or <em>yields</em>.
+
+
+<p>
+A coroutine can terminate its execution in two ways:
+normally, when its main function returns
+(explicitly or implicitly, after the last instruction);
+and abnormally, if there is an unprotected error.
+In case of normal termination,
+<a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a> returns <b>true</b>,
+plus any values returned by the coroutine main function.
+In case of errors, <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a> returns <b>false</b>
+plus an error object.
+
+
+<p>
+A coroutine yields by calling <a href="#pdf-coroutine.yield"><code>coroutine.yield</code></a>.
+When a coroutine yields,
+the corresponding <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a> returns immediately,
+even if the yield happens inside nested function calls
+(that is, not in the main function,
+but in a function directly or indirectly called by the main function).
+In the case of a yield, <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a> also returns <b>true</b>,
+plus any values passed to <a href="#pdf-coroutine.yield"><code>coroutine.yield</code></a>.
+The next time you resume the same coroutine,
+it continues its execution from the point where it yielded,
+with the call to <a href="#pdf-coroutine.yield"><code>coroutine.yield</code></a> returning any extra
+arguments passed to <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a>.
+
+
+<p>
+Like <a href="#pdf-coroutine.create"><code>coroutine.create</code></a>,
+the <a href="#pdf-coroutine.wrap"><code>coroutine.wrap</code></a> function also creates a coroutine,
+but instead of returning the coroutine itself,
+it returns a function that, when called, resumes the coroutine.
+Any arguments passed to this function
+go as extra arguments to <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a>.
+<a href="#pdf-coroutine.wrap"><code>coroutine.wrap</code></a> returns all the values returned by <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a>,
+except the first one (the boolean error code).
+Unlike <a href="#pdf-coroutine.resume"><code>coroutine.resume</code></a>,
+<a href="#pdf-coroutine.wrap"><code>coroutine.wrap</code></a> does not catch errors;
+any error is propagated to the caller.
+
+
+<p>
+As an example of how coroutines work,
+consider the following code:
+
+<pre>
+ function foo (a)
+ print("foo", a)
+ return coroutine.yield(2*a)
+ end
+
+ co = coroutine.create(function (a,b)
+ print("co-body", a, b)
+ local r = foo(a+1)
+ print("co-body", r)
+ local r, s = coroutine.yield(a+b, a-b)
+ print("co-body", r, s)
+ return b, "end"
+ end)
+
+ print("main", coroutine.resume(co, 1, 10))
+ print("main", coroutine.resume(co, "r"))
+ print("main", coroutine.resume(co, "x", "y"))
+ print("main", coroutine.resume(co, "x", "y"))
+</pre><p>
+When you run it, it produces the following output:
+
+<pre>
+ co-body 1 10
+ foo 2
+ main true 4
+ co-body r
+ main true 11 -9
+ co-body x y
+ main true 10 end
+ main false cannot resume dead coroutine
+</pre>
+
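+<p>
+For comparison, a minimal sketch of a coroutine driven through
+<a href="#pdf-coroutine.wrap"><code>coroutine.wrap</code></a>
+(the name <code>gen</code> is only illustrative):
+
+<pre>
+     gen = coroutine.wrap(function (a, b)
+       coroutine.yield(a + b)
+       return a - b
+     end)
+
+     print(gen(1, 10))    --&gt; 11    (no leading boolean, unlike resume)
+     print(gen())         --&gt; -9
+     -- a further call would raise "cannot resume dead coroutine"
+</pre>
+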
+<p>
+You can also create and manipulate coroutines through the C API:
+see functions <a href="#lua_newthread"><code>lua_newthread</code></a>, <a href="#lua_resume"><code>lua_resume</code></a>,
+and <a href="#lua_yield"><code>lua_yield</code></a>.
+
+
+
+
+
+<h1>3 &ndash; <a name="3">The Language</a></h1>
+
+<p>
+This section describes the lexis, the syntax, and the semantics of Lua.
+In other words,
+this section describes
+which tokens are valid,
+how they can be combined,
+and what their combinations mean.
+
+
+<p>
+Language constructs will be explained using the usual extended BNF notation,
+in which
+{<em>a</em>}&nbsp;means&nbsp;0 or more <em>a</em>'s, and
+[<em>a</em>]&nbsp;means an optional <em>a</em>.
+Non-terminals are shown like <em>non-terminal</em>,
+keywords are shown like <b>kword</b>,
+and other terminal symbols are shown like &lsquo;<b>=</b>&rsquo;.
+The complete syntax of Lua can be found in <a href="#9">&sect;9</a>
+at the end of this manual.
+
+
+
+<h2>3.1 &ndash; <a name="3.1">Lexical Conventions</a></h2>
+
+<p>
+Lua is a free-form language.
+It ignores spaces (including new lines) and comments
+between lexical elements (tokens),
+except as delimiters between names and keywords.
+
+
+<p>
+<em>Names</em>
+(also called <em>identifiers</em>)
+in Lua can be any string of letters,
+digits, and underscores,
+not beginning with a digit and
+not being a reserved word.
+Identifiers are used to name variables, table fields, and labels.
+
+
+<p>
+The following <em>keywords</em> are reserved
+and cannot be used as names:
+
+
+<pre>
+ and break do else elseif end
+ false for function goto if in
+ local nil not or repeat return
+ then true until while
+</pre>
+
+<p>
+Lua is a case-sensitive language:
+<code>and</code> is a reserved word, but <code>And</code> and <code>AND</code>
+are two different, valid names.
+As a convention,
+programs should avoid creating
+names that start with an underscore followed by
+one or more uppercase letters (such as <a href="#pdf-_VERSION"><code>_VERSION</code></a>).
+
+
+<p>
+The following strings denote other tokens:
+
+<pre>
+ + - * / % ^ #
+ &amp; ~ | &lt;&lt; &gt;&gt; //
+ == ~= &lt;= &gt;= &lt; &gt; =
+ ( ) { } [ ] ::
+ ; : , . .. ...
+</pre>
+
+<p>
+A <em>short literal string</em>
+can be delimited by matching single or double quotes,
+and can contain the following C-like escape sequences:
+'<code>\a</code>' (bell),
+'<code>\b</code>' (backspace),
+'<code>\f</code>' (form feed),
+'<code>\n</code>' (newline),
+'<code>\r</code>' (carriage return),
+'<code>\t</code>' (horizontal tab),
+'<code>\v</code>' (vertical tab),
+'<code>\\</code>' (backslash),
+'<code>\"</code>' (quotation mark [double quote]),
+and '<code>\'</code>' (apostrophe [single quote]).
+A backslash followed by a line break
+results in a newline in the string.
+The escape sequence '<code>\z</code>' skips the following span
+of white-space characters,
+including line breaks;
+it is particularly useful to break and indent a long literal string
+into multiple lines without adding the newlines and spaces
+into the string contents.
+A short literal string cannot contain unescaped line breaks
+nor escapes not forming a valid escape sequence.
+
+
+<p>
+We can specify any byte in a short literal string by its numeric value
+(including embedded zeros).
+This can be done
+with the escape sequence <code>\x<em>XX</em></code>,
+where <em>XX</em> is a sequence of exactly two hexadecimal digits,
+or with the escape sequence <code>\<em>ddd</em></code>,
+where <em>ddd</em> is a sequence of up to three decimal digits.
+(Note that if a decimal escape sequence is to be followed by a digit,
+it must be expressed using exactly three digits.)
+
+
+<p>
+The UTF-8 encoding of a Unicode character
+can be inserted in a literal string with
+the escape sequence <code>\u{<em>XXX</em>}</code>
+(note the mandatory enclosing brackets),
+where <em>XXX</em> is a sequence of one or more hexadecimal digits
+representing the character code point.
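+
+
+<p>
+For instance, a short sketch of the '<code>\z</code>' and
+<code>\u{<em>XXX</em>}</code> escapes:
+
+<pre>
+     s = "a long line, broken \z
+          across two source lines"   -- no newline or extra spaces in 's'
+     print("\u{48}\u{69}")           --&gt; Hi
+</pre>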
+
+
+<p>
+Literal strings can also be defined using a long format
+enclosed by <em>long brackets</em>.
+We define an <em>opening long bracket of level <em>n</em></em> as an opening
+square bracket followed by <em>n</em> equal signs followed by another
+opening square bracket.
+So, an opening long bracket of level&nbsp;0 is written as <code>[[</code>,
+an opening long bracket of level&nbsp;1 is written as <code>[=[</code>,
+and so on.
+A <em>closing long bracket</em> is defined similarly;
+for instance,
+a closing long bracket of level&nbsp;4 is written as <code>]====]</code>.
+A <em>long literal</em> starts with an opening long bracket of any level and
+ends at the first closing long bracket of the same level.
+It can contain any text except a closing bracket of the same level.
+Literals in this bracketed form can run for several lines,
+do not interpret any escape sequences,
+and ignore long brackets of any other level.
+Any kind of end-of-line sequence
+(carriage return, newline, carriage return followed by newline,
+or newline followed by carriage return)
+is converted to a simple newline.
+
+
+<p>
+For convenience,
+when the opening long bracket is immediately followed by a newline,
+the newline is not included in the string.
+As an example, in a system using ASCII
+(in which '<code>a</code>' is coded as&nbsp;97,
+newline is coded as&nbsp;10, and '<code>1</code>' is coded as&nbsp;49),
+the five literal strings below denote the same string:
+
+<pre>
+ a = 'alo\n123"'
+ a = "alo\n123\""
+ a = '\97lo\10\04923"'
+ a = [[alo
+ 123"]]
+ a = [==[
+ alo
+ 123"]==]
+</pre>
+
+<p>
+Any byte in a literal string not
+explicitly affected by the previous rules represents itself.
+However, Lua opens files for parsing in text mode,
+and the system file functions may have problems with
+some control characters.
+So, it is safer to represent
+non-text data as a quoted literal with
+explicit escape sequences for the non-text characters.
+
+
+<p>
+A <em>numeric constant</em> (or <em>numeral</em>)
+can be written with an optional fractional part
+and an optional decimal exponent,
+marked by a letter '<code>e</code>' or '<code>E</code>'.
+Lua also accepts hexadecimal constants,
+which start with <code>0x</code> or <code>0X</code>.
+Hexadecimal constants also accept an optional fractional part
+plus an optional binary exponent,
+marked by a letter '<code>p</code>' or '<code>P</code>'.
+A numeric constant with a radix point or an exponent
+denotes a float;
+otherwise,
+if its value fits in an integer,
+it denotes an integer.
+Examples of valid integer constants are
+
+<pre>
+ 3 345 0xff 0xBEBADA
+</pre><p>
+Examples of valid float constants are
+
+<pre>
+ 3.0 3.1416 314.16e-2 0.31416E1 34e1
+ 0x0.1E 0xA23p-4 0X1.921FB54442D18P+1
+</pre>
+
+<p>
+A <em>comment</em> starts with a double hyphen (<code>--</code>)
+anywhere outside a string.
+If the text immediately after <code>--</code> is not an opening long bracket,
+the comment is a <em>short comment</em>,
+which runs until the end of the line.
+Otherwise, it is a <em>long comment</em>,
+which runs until the corresponding closing long bracket.
+Long comments are frequently used to disable code temporarily.
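+
+<p>
+For instance, a brief sketch of both forms:
+
+<pre>
+     -- a short comment, running to the end of the line
+     --[[ a long comment,
+          possibly spanning several lines ]]
+     --[==[ a long comment of level 2,
+            which may contain ]] unquoted ]==]
+</pre>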
+
+
+
+
+
+<h2>3.2 &ndash; <a name="3.2">Variables</a></h2>
+
+<p>
+Variables are places that store values.
+There are three kinds of variables in Lua:
+global variables, local variables, and table fields.
+
+
+<p>
+A single name can denote a global variable or a local variable
+(or a function's formal parameter,
+which is a particular kind of local variable):
+
+<pre>
+ var ::= Name
+</pre><p>
+Name denotes identifiers, as defined in <a href="#3.1">&sect;3.1</a>.
+
+
+<p>
+Any variable name is assumed to be global unless explicitly declared
+as a local (see <a href="#3.3.7">&sect;3.3.7</a>).
+Local variables are <em>lexically scoped</em>:
+local variables can be freely accessed by functions
+defined inside their scope (see <a href="#3.5">&sect;3.5</a>).
+
+
+<p>
+Before the first assignment to a variable, its value is <b>nil</b>.
+
+
+<p>
+Square brackets are used to index a table:
+
+<pre>
+ var ::= prefixexp &lsquo;<b>[</b>&rsquo; exp &lsquo;<b>]</b>&rsquo;
+</pre><p>
+The meaning of accesses to table fields can be changed via metatables.
+An access to an indexed variable <code>t[i]</code> is equivalent to
+a call <code>gettable_event(t,i)</code>.
+(See <a href="#2.4">&sect;2.4</a> for a complete description of the
+<code>gettable_event</code> function.
+This function is not defined or callable in Lua.
+We use it here only for explanatory purposes.)
+
+
+<p>
+The syntax <code>var.Name</code> is just syntactic sugar for
+<code>var["Name"]</code>:
+
+<pre>
+ var ::= prefixexp &lsquo;<b>.</b>&rsquo; Name
+</pre>
+
+<p>
+An access to a global variable <code>x</code>
+is equivalent to <code>_ENV.x</code>.
+Due to the way that chunks are compiled,
+<code>_ENV</code> is never a global name (see <a href="#2.2">&sect;2.2</a>).
+
+
+
+
+
+<h2>3.3 &ndash; <a name="3.3">Statements</a></h2>
+
+<p>
+Lua supports an almost conventional set of statements,
+similar to those in Pascal or C.
+This set includes
+assignments, control structures, function calls,
+and variable declarations.
+
+
+
+<h3>3.3.1 &ndash; <a name="3.3.1">Blocks</a></h3>
+
+<p>
+A block is a list of statements,
+which are executed sequentially:
+
+<pre>
+ block ::= {stat}
+</pre><p>
+Lua has <em>empty statements</em>
+that allow you to separate statements with semicolons,
+start a block with a semicolon
+or write two semicolons in sequence:
+
+<pre>
+ stat ::= &lsquo;<b>;</b>&rsquo;
+</pre>
+
+<p>
+Function calls and assignments
+can start with an open parenthesis.
+This possibility leads to an ambiguity in Lua's grammar.
+Consider the following fragment:
+
+<pre>
+ a = b + c
+ (print or io.write)('done')
+</pre><p>
+The grammar could see it in two ways:
+
+<pre>
+ a = b + c(print or io.write)('done')
+
+ a = b + c; (print or io.write)('done')
+</pre><p>
+The current parser always sees such constructions
+in the first way,
+interpreting the open parenthesis
+as the start of the arguments to a call.
+To avoid this ambiguity,
+it is a good practice to always precede with a semicolon
+statements that start with a parenthesis:
+
+<pre>
+ ;(print or io.write)('done')
+</pre>
+
+<p>
+A block can be explicitly delimited to produce a single statement:
+
+<pre>
+ stat ::= <b>do</b> block <b>end</b>
+</pre><p>
+Explicit blocks are useful
+to control the scope of variable declarations.
+Explicit blocks are also sometimes used to
+add a <b>return</b> statement in the middle
+of another block (see <a href="#3.3.4">&sect;3.3.4</a>).
+
+
+
+
+
+<h3>3.3.2 &ndash; <a name="3.3.2">Chunks</a></h3>
+
+<p>
+The unit of compilation of Lua is called a <em>chunk</em>.
+Syntactically,
+a chunk is simply a block:
+
+<pre>
+ chunk ::= block
+</pre>
+
+<p>
+Lua handles a chunk as the body of an anonymous function
+with a variable number of arguments
+(see <a href="#3.4.11">&sect;3.4.11</a>).
+As such, chunks can define local variables,
+receive arguments, and return values.
+Moreover, such an anonymous function is compiled as if in the
+scope of an external local variable called <code>_ENV</code> (see <a href="#2.2">&sect;2.2</a>).
+The resulting function always has <code>_ENV</code> as its only upvalue,
+even if it does not use that variable.
+
+
+<p>
+A chunk can be stored in a file or in a string inside the host program.
+To execute a chunk,
+Lua first <em>loads</em> it,
+precompiling the chunk's code into instructions for a virtual machine,
+and then Lua executes the compiled code
+with an interpreter for the virtual machine.
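+
+
+<p>
+For instance, a minimal sketch using the <a href="#pdf-load"><code>load</code></a> function
+to load a chunk given as a string;
+because the chunk is a vararg function, it can receive arguments:
+
+<pre>
+     f = load("local a, b = ...; return a + b")
+     print(f(3, 4))       --&gt; 7
+</pre>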
+
+
+<p>
+Chunks can also be precompiled into binary form;
+see program <code>luac</code> and function <a href="#pdf-string.dump"><code>string.dump</code></a> for details.
+Programs in source and compiled forms are interchangeable;
+Lua automatically detects the file type and acts accordingly (see <a href="#pdf-load"><code>load</code></a>).
+
+
+
+
+
+<h3>3.3.3 &ndash; <a name="3.3.3">Assignment</a></h3>
+
+<p>
+Lua allows multiple assignments.
+Therefore, the syntax for assignment
+defines a list of variables on the left side
+and a list of expressions on the right side.
+The elements in both lists are separated by commas:
+
+<pre>
+ stat ::= varlist &lsquo;<b>=</b>&rsquo; explist
+ varlist ::= var {&lsquo;<b>,</b>&rsquo; var}
+ explist ::= exp {&lsquo;<b>,</b>&rsquo; exp}
+</pre><p>
+Expressions are discussed in <a href="#3.4">&sect;3.4</a>.
+
+
+<p>
+Before the assignment,
+the list of values is <em>adjusted</em> to the length of
+the list of variables.
+If there are more values than needed,
+the excess values are thrown away.
+If there are fewer values than needed,
+the list is extended with as many <b>nil</b>'s as needed.
+If the list of expressions ends with a function call,
+then all values returned by that call enter the list of values,
+before the adjustment
+(except when the call is enclosed in parentheses; see <a href="#3.4">&sect;3.4</a>).
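+
+<p>
+For instance, a short sketch of this adjustment:
+
+<pre>
+     a, b, c = 1, 2                        -- c gets nil
+     x, y = 1, 2, 3                        -- the 3 is discarded
+     i, j = string.find("hello", "ll")     -- the call expands: i=3, j=4
+</pre>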
+
+
+<p>
+The assignment statement first evaluates all its expressions
+and only then the assignments are performed.
+Thus the code
+
+<pre>
+ i = 3
+ i, a[i] = i+1, 20
+</pre><p>
+sets <code>a[3]</code> to 20, without affecting <code>a[4]</code>
+because the <code>i</code> in <code>a[i]</code> is evaluated (to 3)
+before it is assigned&nbsp;4.
+Similarly, the line
+
+<pre>
+ x, y = y, x
+</pre><p>
+exchanges the values of <code>x</code> and <code>y</code>,
+and
+
+<pre>
+ x, y, z = y, z, x
+</pre><p>
+cyclically permutes the values of <code>x</code>, <code>y</code>, and <code>z</code>.
+
+
+<p>
+The meaning of assignments to global variables
+and table fields can be changed via metatables.
+An assignment to an indexed variable <code>t[i] = val</code> is equivalent to
+<code>settable_event(t,i,val)</code>.
+(See <a href="#2.4">&sect;2.4</a> for a complete description of the
+<code>settable_event</code> function.
+This function is not defined or callable in Lua.
+We use it here only for explanatory purposes.)
+
+
+<p>
+An assignment to a global name <code>x = val</code>
+is equivalent to the assignment
+<code>_ENV.x = val</code> (see <a href="#2.2">&sect;2.2</a>).
+
+
+
+
+
+<h3>3.3.4 &ndash; <a name="3.3.4">Control Structures</a></h3><p>
+The control structures
+<b>if</b>, <b>while</b>, and <b>repeat</b> have the usual meaning and
+familiar syntax:
+
+
+
+
+<pre>
+ stat ::= <b>while</b> exp <b>do</b> block <b>end</b>
+ stat ::= <b>repeat</b> block <b>until</b> exp
+ stat ::= <b>if</b> exp <b>then</b> block {<b>elseif</b> exp <b>then</b> block} [<b>else</b> block] <b>end</b>
+</pre><p>
+Lua also has a <b>for</b> statement, in two flavors (see <a href="#3.3.5">&sect;3.3.5</a>).
+
+
+<p>
+The condition expression of a
+control structure can return any value.
+Both <b>false</b> and <b>nil</b> are considered false.
+All values different from <b>nil</b> and <b>false</b> are considered true
+(in particular, the number 0 and the empty string are also true).
+
+
+<p>
+In the <b>repeat</b>&ndash;<b>until</b> loop,
+the inner block does not end at the <b>until</b> keyword,
+but only after the condition.
+So, the condition can refer to local variables
+declared inside the loop block.
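+
+<p>
+For instance, a minimal sketch, reading lines with the standard <code>io.read</code> function:
+
+<pre>
+     repeat
+       local line = io.read()
+     until line == nil or line == "quit"   -- 'line' is still visible here
+</pre>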
+
+
+<p>
+The <b>goto</b> statement transfers the program control to a label.
+For syntactical reasons,
+labels in Lua are considered statements too:
+
+
+
+<pre>
+ stat ::= <b>goto</b> Name
+ stat ::= label
+ label ::= &lsquo;<b>::</b>&rsquo; Name &lsquo;<b>::</b>&rsquo;
+</pre>
+
+<p>
+A label is visible in the entire block where it is defined,
+except
+inside nested blocks where a label with the same name is defined and
+inside nested functions.
+A goto may jump to any visible label as long as it does not
+enter into the scope of a local variable.
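+
+<p>
+For instance, a sketch of the common "continue" idiom:
+
+<pre>
+     for i = 1, 3 do
+       if i == 2 then goto continue end
+       print(i)
+       ::continue::
+     end
+     --&gt; 1
+     --&gt; 3
+</pre>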
+
+
+<p>
+Labels and empty statements are called <em>void statements</em>,
+as they perform no actions.
+
+
+<p>
+The <b>break</b> statement terminates the execution of a
+<b>while</b>, <b>repeat</b>, or <b>for</b> loop,
+skipping to the next statement after the loop:
+
+
+<pre>
+ stat ::= <b>break</b>
+</pre><p>
+A <b>break</b> ends the innermost enclosing loop.
+
+
+<p>
+The <b>return</b> statement is used to return values
+from a function or a chunk
+(which is an anonymous function).
+
+Functions can return more than one value,
+so the syntax for the <b>return</b> statement is
+
+<pre>
+ stat ::= <b>return</b> [explist] [&lsquo;<b>;</b>&rsquo;]
+</pre>
+
+<p>
+The <b>return</b> statement can only be written
+as the last statement of a block.
+If it is really necessary to <b>return</b> in the middle of a block,
+then an explicit inner block can be used,
+as in the idiom <code>do return end</code>,
+because now <b>return</b> is the last statement in its (inner) block.
+
+
+
+
+
+<h3>3.3.5 &ndash; <a name="3.3.5">For Statement</a></h3>
+
+<p>
+
+The <b>for</b> statement has two forms:
+one numerical and one generic.
+
+
+<p>
+The numerical <b>for</b> loop repeats a block of code while a
+control variable runs through an arithmetic progression.
+It has the following syntax:
+
+<pre>
+ stat ::= <b>for</b> Name &lsquo;<b>=</b>&rsquo; exp &lsquo;<b>,</b>&rsquo; exp [&lsquo;<b>,</b>&rsquo; exp] <b>do</b> block <b>end</b>
+</pre><p>
+The <em>block</em> is repeated for <em>name</em> starting at the value of
+the first <em>exp</em>, until it passes the second <em>exp</em> by steps of the
+third <em>exp</em>.
+More precisely, a <b>for</b> statement like
+
+<pre>
+ for v = <em>e1</em>, <em>e2</em>, <em>e3</em> do <em>block</em> end
+</pre><p>
+is equivalent to the code:
+
+<pre>
+ do
+ local <em>var</em>, <em>limit</em>, <em>step</em> = tonumber(<em>e1</em>), tonumber(<em>e2</em>), tonumber(<em>e3</em>)
+ if not (<em>var</em> and <em>limit</em> and <em>step</em>) then error() end
+ <em>var</em> = <em>var</em> - <em>step</em>
+ while true do
+ <em>var</em> = <em>var</em> + <em>step</em>
+ if (<em>step</em> &gt;= 0 and <em>var</em> &gt; <em>limit</em>) or (<em>step</em> &lt; 0 and <em>var</em> &lt; <em>limit</em>) then
+ break
+ end
+ local v = <em>var</em>
+ <em>block</em>
+ end
+ end
+</pre>
+
+<p>
+Note the following:
+
+<ul>
+
+<li>
+All three control expressions are evaluated only once,
+before the loop starts.
+They must all result in numbers.
+</li>
+
+<li>
+<code><em>var</em></code>, <code><em>limit</em></code>, and <code><em>step</em></code> are invisible variables.
+The names shown here are for explanatory purposes only.
+</li>
+
+<li>
+If the third expression (the step) is absent,
+then a step of&nbsp;1 is used.
+</li>
+
+<li>
+You can use <b>break</b> and <b>goto</b> to exit a <b>for</b> loop.
+</li>
+
+<li>
+The loop variable <code>v</code> is local to the loop body.
+If you need its value after the loop,
+assign it to another variable before exiting the loop.
+</li>
+
+</ul>
+
+<p>
+The generic <b>for</b> statement works over functions,
+called <em>iterators</em>.
+On each iteration, the iterator function is called to produce a new value,
+stopping when this new value is <b>nil</b>.
+The generic <b>for</b> loop has the following syntax:
+
+<pre>
+ stat ::= <b>for</b> namelist <b>in</b> explist <b>do</b> block <b>end</b>
+ namelist ::= Name {&lsquo;<b>,</b>&rsquo; Name}
+</pre><p>
+A <b>for</b> statement like
+
+<pre>
+ for <em>var_1</em>, &middot;&middot;&middot;, <em>var_n</em> in <em>explist</em> do <em>block</em> end
+</pre><p>
+is equivalent to the code:
+
+<pre>
+ do
+ local <em>f</em>, <em>s</em>, <em>var</em> = <em>explist</em>
+ while true do
+ local <em>var_1</em>, &middot;&middot;&middot;, <em>var_n</em> = <em>f</em>(<em>s</em>, <em>var</em>)
+ if <em>var_1</em> == nil then break end
+ <em>var</em> = <em>var_1</em>
+ <em>block</em>
+ end
+ end
+</pre><p>
+Note the following:
+
+<ul>
+
+<li>
+<code><em>explist</em></code> is evaluated only once.
+Its results are an <em>iterator</em> function,
+a <em>state</em>,
+and an initial value for the first <em>iterator variable</em>.
+</li>
+
+<li>
+<code><em>f</em></code>, <code><em>s</em></code>, and <code><em>var</em></code> are invisible variables.
+The names are here for explanatory purposes only.
+</li>
+
+<li>
+You can use <b>break</b> to exit a <b>for</b> loop.
+</li>
+
+<li>
+The loop variables <code><em>var_i</em></code> are local to the loop;
+you cannot use their values after the <b>for</b> ends.
+If you need these values,
+then assign them to other variables before breaking or exiting the loop.
+</li>
+
+</ul>
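+
+<p>
+For instance, a minimal sketch using the standard <code>ipairs</code> iterator:
+
+<pre>
+     t = {"a", "b", "c"}
+     for i, v in ipairs(t) do   -- ipairs returns an iterator, a state, and 0
+       print(i, v)
+     end
+     --&gt; 1   a
+     --&gt; 2   b
+     --&gt; 3   c
+</pre>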
+
+
+
+
+<h3>3.3.6 &ndash; <a name="3.3.6">Function Calls as Statements</a></h3><p>
+To allow possible side-effects,
+function calls can be executed as statements:
+
+<pre>
+ stat ::= functioncall
+</pre><p>
+In this case, all returned values are thrown away.
+Function calls are explained in <a href="#3.4.10">&sect;3.4.10</a>.
+
+
+
+
+
+<h3>3.3.7 &ndash; <a name="3.3.7">Local Declarations</a></h3><p>
+Local variables can be declared anywhere inside a block.
+The declaration can include an initial assignment:
+
+<pre>
+ stat ::= <b>local</b> namelist [&lsquo;<b>=</b>&rsquo; explist]
+</pre><p>
+If present, an initial assignment has the same semantics
+of a multiple assignment (see <a href="#3.3.3">&sect;3.3.3</a>).
+Otherwise, all variables are initialized with <b>nil</b>.
+
+
+<p>
+A chunk is also a block (see <a href="#3.3.2">&sect;3.3.2</a>),
+and so local variables can be declared in a chunk outside any explicit block.
+
+
+<p>
+The visibility rules for local variables are explained in <a href="#3.5">&sect;3.5</a>.
+
+
+
+
+
+
+
+<h2>3.4 &ndash; <a name="3.4">Expressions</a></h2>
+
+<p>
+The basic expressions in Lua are the following:
+
+<pre>
+ exp ::= prefixexp
+ exp ::= <b>nil</b> | <b>false</b> | <b>true</b>
+ exp ::= Numeral
+ exp ::= LiteralString
+ exp ::= functiondef
+ exp ::= tableconstructor
+ exp ::= &lsquo;<b>...</b>&rsquo;
+ exp ::= exp binop exp
+ exp ::= unop exp
+ prefixexp ::= var | functioncall | &lsquo;<b>(</b>&rsquo; exp &lsquo;<b>)</b>&rsquo;
+</pre>
+
+<p>
+Numerals and literal strings are explained in <a href="#3.1">&sect;3.1</a>;
+variables are explained in <a href="#3.2">&sect;3.2</a>;
+function definitions are explained in <a href="#3.4.11">&sect;3.4.11</a>;
+function calls are explained in <a href="#3.4.10">&sect;3.4.10</a>;
+table constructors are explained in <a href="#3.4.9">&sect;3.4.9</a>.
+Vararg expressions,
+denoted by three dots ('<code>...</code>'), can only be used when
+directly inside a vararg function;
+they are explained in <a href="#3.4.11">&sect;3.4.11</a>.
+
+
+<p>
+Binary operators comprise arithmetic operators (see <a href="#3.4.1">&sect;3.4.1</a>),
+bitwise operators (see <a href="#3.4.2">&sect;3.4.2</a>),
+relational operators (see <a href="#3.4.4">&sect;3.4.4</a>), logical operators (see <a href="#3.4.5">&sect;3.4.5</a>),
+and the concatenation operator (see <a href="#3.4.6">&sect;3.4.6</a>).
+Unary operators comprise the unary minus (see <a href="#3.4.1">&sect;3.4.1</a>),
+the unary bitwise NOT (see <a href="#3.4.2">&sect;3.4.2</a>),
+the unary logical <b>not</b> (see <a href="#3.4.5">&sect;3.4.5</a>),
+and the unary <em>length operator</em> (see <a href="#3.4.7">&sect;3.4.7</a>).
+
+
+<p>
+Both function calls and vararg expressions can result in multiple values.
+If a function call is used as a statement (see <a href="#3.3.6">&sect;3.3.6</a>),
+then its return list is adjusted to zero elements,
+thus discarding all returned values.
+If an expression is used as the last (or the only) element
+of a list of expressions,
+then no adjustment is made
+(unless the expression is enclosed in parentheses).
+In all other contexts,
+Lua adjusts the result list to one element,
+either discarding all values except the first one
+or adding a single <b>nil</b> if there are no values.
+
+
+<p>
+Here are some examples:
+
+<pre>
+ f() -- adjusted to 0 results
+ g(f(), x) -- f() is adjusted to 1 result
+ g(x, f()) -- g gets x plus all results from f()
+ a,b,c = f(), x -- f() is adjusted to 1 result (c gets nil)
+ a,b = ... -- a gets the first vararg parameter, b gets
+ -- the second (both a and b can get nil if there
+ -- is no corresponding vararg parameter)
+
+ a,b,c = x, f() -- f() is adjusted to 2 results
+ a,b,c = f() -- f() is adjusted to 3 results
+ return f() -- returns all results from f()
+ return ... -- returns all received vararg parameters
+ return x,y,f() -- returns x, y, and all results from f()
+ {f()} -- creates a list with all results from f()
+ {...} -- creates a list with all vararg parameters
+ {f(), nil} -- f() is adjusted to 1 result
+</pre>
+
+<p>
+Any expression enclosed in parentheses always results in only one value.
+Thus,
+<code>(f(x,y,z))</code> is always a single value,
+even if <code>f</code> returns several values.
+(The value of <code>(f(x,y,z))</code> is the first value returned by <code>f</code>
+or <b>nil</b> if <code>f</code> does not return any values.)
+
+
+
+<h3>3.4.1 &ndash; <a name="3.4.1">Arithmetic Operators</a></h3><p>
+Lua supports the following arithmetic operators:
+
+<ul>
+<li><b><code>+</code>: </b>addition</li>
+<li><b><code>-</code>: </b>subtraction</li>
+<li><b><code>*</code>: </b>multiplication</li>
+<li><b><code>/</code>: </b>float division</li>
+<li><b><code>//</code>: </b>floor division</li>
+<li><b><code>%</code>: </b>modulo</li>
+<li><b><code>^</code>: </b>exponentiation</li>
+<li><b><code>-</code>: </b>unary minus</li>
+</ul>
+
+<p>
+With the exception of exponentiation and float division,
+the arithmetic operators work as follows:
+If both operands are integers,
+the operation is performed over integers and the result is an integer.
+Otherwise, if both operands are numbers
+or strings that can be converted to
+numbers (see <a href="#3.4.3">&sect;3.4.3</a>),
+then they are converted to floats,
+the operation is performed following the usual rules
+for floating-point arithmetic
+(usually the IEEE 754 standard),
+and the result is a float.
+
+
+<p>
+Exponentiation and float division (<code>/</code>)
+always convert their operands to floats
+and the result is always a float.
+Exponentiation uses the ISO&nbsp;C function <code>pow</code>,
+so that it works for non-integer exponents too.
+
+
+<p>
+Floor division (<code>//</code>) is a division
+that rounds the quotient towards minus infinity,
+that is, the floor of the division of its operands.
+
+
+<p>
+Modulo is defined as the remainder of a division
+that rounds the quotient towards minus infinity (floor division).
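+
+<p>
+For instance, a few illustrative evaluations:
+
+<pre>
+     print(7 / 2)      --&gt; 3.5      (float division)
+     print(7 // 2)     --&gt; 3        (floor division on integers)
+     print(-7 // 2)    --&gt; -4       (rounds towards minus infinity)
+     print(7 % -2)     --&gt; -1       (remainder of floor division)
+     print(2 ^ 10)     --&gt; 1024.0   (exponentiation always gives a float)
+</pre>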
+
+
+<p>
+In case of overflows in integer arithmetic,
+all operations <em>wrap around</em>,
+according to the usual rules of two's-complement arithmetic.
+(In other words,
+they return the unique representable integer
+that is equal modulo <em>2<sup>64</sup></em> to the mathematical result.)
+
+
+
+<h3>3.4.2 &ndash; <a name="3.4.2">Bitwise Operators</a></h3><p>
+Lua supports the following bitwise operators:
+
+<ul>
+<li><b><code>&amp;</code>: </b>bitwise AND</li>
+<li><b><code>&#124;</code>: </b>bitwise OR</li>
+<li><b><code>~</code>: </b>bitwise exclusive OR</li>
+<li><b><code>&gt;&gt;</code>: </b>right shift</li>
+<li><b><code>&lt;&lt;</code>: </b>left shift</li>
+<li><b><code>~</code>: </b>unary bitwise NOT</li>
+</ul>
+
+<p>
+All bitwise operations convert their operands to integers
+(see <a href="#3.4.3">&sect;3.4.3</a>),
+operate on all bits of those integers,
+and result in an integer.
+
+
+<p>
+Both right and left shifts fill the vacant bits with zeros.
+Negative displacements shift to the other direction;
+displacements with absolute values equal to or higher than
+the number of bits in an integer
+result in zero (as all bits are shifted out).
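+
+<p>
+For instance:
+
+<pre>
+     print(0xF0 | 0x0F)    --&gt; 255
+     print(0xF0 &amp; 0x0F)    --&gt; 0
+     print(1 &lt;&lt; 4)         --&gt; 16
+     print(0xFF &gt;&gt; 4)      --&gt; 15
+     print(~0)             --&gt; -1    (all bits set)
+</pre>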
+
+
+
+
+
+<h3>3.4.3 &ndash; <a name="3.4.3">Coercions and Conversions</a></h3><p>
+Lua provides some automatic conversions between some
+types and representations at run time.
+Bitwise operators always convert float operands to integers.
+Exponentiation and float division
+always convert integer operands to floats.
+All other arithmetic operations applied to mixed numbers
+(integers and floats) convert the integer operand to a float;
+this is called the <em>usual rule</em>.
+The C API also converts both integers to floats and
+floats to integers, as needed.
+Moreover, string concatenation accepts numbers as arguments,
+besides strings.
+
+
+<p>
+Lua also converts strings to numbers,
+whenever a number is expected.
+
+
+<p>
+In a conversion from integer to float,
+if the integer value has an exact representation as a float,
+that is the result.
+Otherwise,
+the conversion gets the nearest higher or
+the nearest lower representable value.
+This kind of conversion never fails.
+
+
+<p>
+The conversion from float to integer
+checks whether the float has an exact representation as an integer
+(that is, the float has an integral value and
+it is in the range of integer representation).
+If it does, that representation is the result.
+Otherwise, the conversion fails.
+
+
+<p>
+The conversion from strings to numbers goes as follows:
+First, the string is converted to an integer or a float,
+following its syntax and the rules of the Lua lexer.
+(The string may also have leading and trailing spaces and a sign.)
+Then, the resulting number (float or integer)
+is converted to the type (float or integer) required by the context
+(e.g., the operation that forced the conversion).
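+
+<p>
+For instance, a few illustrative conversions
+(<code>math.type</code> reports the subtype of a number):
+
+<pre>
+     print("10" + 1)             --&gt; 11
+     print("0x10" * 2)           --&gt; 32
+     print(10 .. 20)             --&gt; 1020    (concatenation accepts numbers)
+     print(math.type(1 + 0.5))   --&gt; float
+</pre>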
+
+
+<p>
+All conversions from strings to numbers
+accept both a dot and the current locale mark
+as the radix character.
+(The Lua lexer, however, accepts only a dot.)
+
+
+<p>
+The conversion from numbers to strings uses a
+non-specified human-readable format.
+For complete control over how numbers are converted to strings,
+use the <code>format</code> function from the string library
+(see <a href="#pdf-string.format"><code>string.format</code></a>).
+
+
+
+
+
+<h3>3.4.4 &ndash; <a name="3.4.4">Relational Operators</a></h3><p>
+Lua supports the following relational operators:
+
+<ul>
+<li><b><code>==</code>: </b>equality</li>
+<li><b><code>~=</code>: </b>inequality</li>
+<li><b><code>&lt;</code>: </b>less than</li>
+<li><b><code>&gt;</code>: </b>greater than</li>
+<li><b><code>&lt;=</code>: </b>less or equal</li>
+<li><b><code>&gt;=</code>: </b>greater or equal</li>
+</ul><p>
+These operators always result in <b>false</b> or <b>true</b>.
+
+
+<p>
+Equality (<code>==</code>) first compares the type of its operands.
+If the types are different, then the result is <b>false</b>.
+Otherwise, the values of the operands are compared.
+Strings are compared in the obvious way.
+Numbers are equal if they denote the same mathematical value.
+
+
+<p>
+Tables, userdata, and threads
+are compared by reference:
+two objects are considered equal only if they are the same object.
+Every time you create a new object
+(a table, userdata, or thread),
+this new object is different from any previously existing object.
+Closures with the same reference are always equal.
+Closures with any detectable difference
+(different behavior, different definition) are always different.
+
+
+<p>
+You can change the way that Lua compares tables and userdata
+by using the "eq" metamethod (see <a href="#2.4">&sect;2.4</a>).
+
+
+<p>
+Equality comparisons do not convert strings to numbers
+or vice versa.
+Thus, <code>"0"==0</code> evaluates to <b>false</b>,
+and <code>t[0]</code> and <code>t["0"]</code> denote different
+entries in a table.
+
+
+<p>
+The operator <code>~=</code> is exactly the negation of equality (<code>==</code>).
+
+
+<p>
+The order operators work as follows.
+If both arguments are numbers,
+then they are compared according to their mathematical values
+(regardless of their subtypes).
+Otherwise, if both arguments are strings,
+then their values are compared according to the current locale.
+Otherwise, Lua tries to call the "lt" or the "le"
+metamethod (see <a href="#2.4">&sect;2.4</a>).
+A comparison <code>a &gt; b</code> is translated to <code>b &lt; a</code>
+and <code>a &gt;= b</code> is translated to <code>b &lt;= a</code>.
+
+
+<p>
+Following the IEEE 754 standard,
+NaN is considered neither smaller than,
+nor equal to, nor greater than any value (including itself).
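+
+<p>
+For instance, a few illustrative comparisons:
+
+<pre>
+     print(1 == 1.0)     --&gt; true     (same mathematical value)
+     print("0" == 0)     --&gt; false    (different types)
+     print({} == {})     --&gt; false    (different tables)
+     print("2" &lt; "15")   --&gt; false    (string comparison)
+     print(0/0 ~= 0/0)   --&gt; true     (NaN is not equal to itself)
+</pre>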
+
+
+
+
+
+<h3>3.4.5 &ndash; <a name="3.4.5">Logical Operators</a></h3><p>
+The logical operators in Lua are
+<b>and</b>, <b>or</b>, and <b>not</b>.
+Like the control structures (see <a href="#3.3.4">&sect;3.3.4</a>),
+all logical operators consider both <b>false</b> and <b>nil</b> as false
+and anything else as true.
+
+
+<p>
+The negation operator <b>not</b> always returns <b>false</b> or <b>true</b>.
+The conjunction operator <b>and</b> returns its first argument
+if this value is <b>false</b> or <b>nil</b>;
+otherwise, <b>and</b> returns its second argument.
+The disjunction operator <b>or</b> returns its first argument
+if this value is different from <b>nil</b> and <b>false</b>;
+otherwise, <b>or</b> returns its second argument.
+Both <b>and</b> and <b>or</b> use short-circuit evaluation;
+that is,
+the second operand is evaluated only if necessary.
+Here are some examples:
+
+<pre>
+ 10 or 20 --&gt; 10
+ 10 or error() --&gt; 10
+ nil or "a" --&gt; "a"
+ nil and 10 --&gt; nil
+ false and error() --&gt; false
+ false and nil --&gt; false
+ false or nil --&gt; nil
+ 10 and 20 --&gt; 20
+</pre><p>
+(In this manual,
+<code>--&gt;</code> indicates the result of the preceding expression.)
+
+
+
+
+
+<h3>3.4.6 &ndash; <a name="3.4.6">Concatenation</a></h3><p>
+The string concatenation operator in Lua is
+denoted by two dots ('<code>..</code>').
+If both operands are strings or numbers, then they are converted to
+strings according to the rules described in <a href="#3.4.3">&sect;3.4.3</a>.
+Otherwise, the <code>__concat</code> metamethod is called (see <a href="#2.4">&sect;2.4</a>).
+
+
+
+
+
+<h3>3.4.7 &ndash; <a name="3.4.7">The Length Operator</a></h3>
+
+<p>
+The length operator is denoted by the unary prefix operator <code>#</code>.
+
+
+<p>
+The length of a string is its number of bytes
+(that is, the usual meaning of string length when each
+character is one byte).
+
+
+<p>
+The length operator applied on a table
+returns a border in that table.
+A <em>border</em> in a table <code>t</code> is any natural number
+that satisfies the following condition:
+
+<pre>
+ (border == 0 or t[border] ~= nil) and t[border + 1] == nil
+</pre><p>
+In words,
+a border is any (natural) index in a table
+where a non-nil value is followed by a nil value
+(or zero, when index 1 is nil).
+
+
+<p>
+A table with exactly one border is called a <em>sequence</em>.
+For instance, the table <code>{10, 20, 30, 40, 50}</code> is a sequence,
+as it has only one border (5).
+The table <code>{10, 20, 30, nil, 50}</code> has two borders (3 and 5),
+and therefore it is not a sequence.
+The table <code>{nil, 20, 30, nil, nil, 60, nil}</code>
+has three borders (0, 3, and 6),
+so it is not a sequence either.
+The table <code>{}</code> is a sequence with border 0.
+Note that non-natural keys do not interfere
+with whether a table is a sequence.
+
+
+<p>
+When <code>t</code> is a sequence,
+<code>#t</code> returns its only border,
+which corresponds to the intuitive notion of the length of the sequence.
+When <code>t</code> is not a sequence,
+<code>#t</code> can return any of its borders.
+(The exact one depends on details of
+the internal representation of the table,
+which in turn can depend on how the table was populated and
+the memory addresses of its non-numeric keys.)
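+
+<p>
+For instance:
+
+<pre>
+     print(#"hello")                 --&gt; 5
+     print(#{10, 20, 30, 40, 50})    --&gt; 5         (a sequence; its only border)
+     print(#{10, 20, 30, nil, 50})   --&gt; 3 or 5    (either border of a non-sequence)
+</pre>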
+
+
+<p>
+The computation of the length of a table
+has a guaranteed worst-case time of <em>O(log n)</em>,
+where <em>n</em> is the largest natural key in the table.
+
+
+<p>
+A program can modify the behavior of the length operator for
+any value but strings through the <code>__len</code> metamethod (see <a href="#2.4">&sect;2.4</a>).
+
+
+
+
+
+<h3>3.4.8 &ndash; <a name="3.4.8">Precedence</a></h3><p>
+Operator precedence in Lua follows the table below,
+from lower to higher priority:
+
+<pre>
+ or
+ and
+ &lt; &gt; &lt;= &gt;= ~= ==
+ |
+ ~
+ &amp;
+ &lt;&lt; &gt;&gt;
+ ..
+ + -
+ * / // %
+ unary operators (not # - ~)
+ ^
+</pre><p>
+As usual,
+you can use parentheses to change the precedences of an expression.
+The concatenation ('<code>..</code>') and exponentiation ('<code>^</code>')
+operators are right associative.
+All other binary operators are left associative.
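+
+<p>
+For instance:
+
+<pre>
+     print(2 + 3 * 4)     --&gt; 14       ('*' binds tighter than '+')
+     print(-2 ^ 2)        --&gt; -4.0     ('^' binds tighter than unary minus)
+     print(2 ^ 3 ^ 2)     --&gt; 512.0    (right associative: 2 ^ (3 ^ 2))
+     print(1 .. 2 .. 3)   --&gt; 123
+</pre>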
+
+
+
+
+
+<h3>3.4.9 &ndash; <a name="3.4.9">Table Constructors</a></h3><p>
+Table constructors are expressions that create tables.
+Every time a constructor is evaluated, a new table is created.
+A constructor can be used to create an empty table
+or to create a table and initialize some of its fields.
+The general syntax for constructors is
+
+<pre>
+ tableconstructor ::= &lsquo;<b>{</b>&rsquo; [fieldlist] &lsquo;<b>}</b>&rsquo;
+ fieldlist ::= field {fieldsep field} [fieldsep]
+ field ::= &lsquo;<b>[</b>&rsquo; exp &lsquo;<b>]</b>&rsquo; &lsquo;<b>=</b>&rsquo; exp | Name &lsquo;<b>=</b>&rsquo; exp | exp
+ fieldsep ::= &lsquo;<b>,</b>&rsquo; | &lsquo;<b>;</b>&rsquo;
+</pre>
+
+<p>
+Each field of the form <code>[exp1] = exp2</code> adds to the new table an entry
+with key <code>exp1</code> and value <code>exp2</code>.
+A field of the form <code>name = exp</code> is equivalent to
+<code>["name"] = exp</code>.
+Finally, fields of the form <code>exp</code> are equivalent to
+<code>[i] = exp</code>, where <code>i</code> are consecutive integers
+starting with 1.
+Fields in the other formats do not affect this counting.
+For example,
+
+<pre>
+ a = { [f(1)] = g; "x", "y"; x = 1, f(x), [30] = 23; 45 }
+</pre><p>
+is equivalent to
+
+<pre>
+ do
+ local t = {}
+ t[f(1)] = g
+ t[1] = "x" -- 1st exp
+ t[2] = "y" -- 2nd exp
+ t.x = 1 -- t["x"] = 1
+ t[3] = f(x) -- 3rd exp
+ t[30] = 23
+ t[4] = 45 -- 4th exp
+ a = t
+ end
+</pre>
+
+<p>
+The order of the assignments in a constructor is undefined.
+(This order would be relevant only when there are repeated keys.)
+
+
+<p>
+If the last field in the list has the form <code>exp</code>
+and the expression is a function call or a vararg expression,
+then all values returned by this expression enter the list consecutively
+(see <a href="#3.4.10">&sect;3.4.10</a>).
+
+
+<p>
+The field list can have an optional trailing separator,
+as a convenience for machine-generated code.
+
+
+
+
+
+<h3>3.4.10 &ndash; <a name="3.4.10">Function Calls</a></h3><p>
+A function call in Lua has the following syntax:
+
+<pre>
+ functioncall ::= prefixexp args
+</pre><p>
+In a function call,
+first prefixexp and args are evaluated.
+If the value of prefixexp has type <em>function</em>,
+then this function is called
+with the given arguments.
+Otherwise, the prefixexp "call" metamethod is called,
+having as first parameter the value of prefixexp,
+followed by the original call arguments
+(see <a href="#2.4">&sect;2.4</a>).
+
+
+<p>
+The form
+
+<pre>
+ functioncall ::= prefixexp &lsquo;<b>:</b>&rsquo; Name args
+</pre><p>
+can be used to call "methods".
+A call <code>v:name(<em>args</em>)</code>
+is syntactic sugar for <code>v.name(v,<em>args</em>)</code>,
+except that <code>v</code> is evaluated only once.
+
+
+<p>
+Arguments have the following syntax:
+
+<pre>
+ args ::= &lsquo;<b>(</b>&rsquo; [explist] &lsquo;<b>)</b>&rsquo;
+ args ::= tableconstructor
+ args ::= LiteralString
+</pre><p>
+All argument expressions are evaluated before the call.
+A call of the form <code>f{<em>fields</em>}</code> is
+syntactic sugar for <code>f({<em>fields</em>})</code>;
+that is, the argument list is a single new table.
+A call of the form <code>f'<em>string</em>'</code>
+(or <code>f"<em>string</em>"</code> or <code>f[[<em>string</em>]]</code>)
+is syntactic sugar for <code>f('<em>string</em>')</code>;
+that is, the argument list is a single literal string.
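+
+<p>
+For instance, a few illustrative calls:
+
+<pre>
+     s = "hello"
+     print(s:upper())    --&gt; HELLO    (same as s.upper(s))
+     print(type{})       --&gt; table    (same as type({}))
+     print(type"x")      --&gt; string   (same as type("x"))
+</pre>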
+
+
+<p>
+A call of the form <code>return <em>functioncall</em></code> is called
+a <em>tail call</em>.
+Lua implements <em>proper tail calls</em>
+(or <em>proper tail recursion</em>):
+in a tail call,
+the called function reuses the stack entry of the calling function.
+Therefore, there is no limit on the number of nested tail calls that
+a program can execute.
+However, a tail call erases any debug information about the
+calling function.
+Note that a tail call only happens with a particular syntax,
+where the <b>return</b> has one single function call as argument;
+this syntax makes the calling function return exactly
+the returns of the called function.
+So, none of the following examples are tail calls:
+
+<pre>
+ return (f(x)) -- results adjusted to 1
+ return 2 * f(x)
+ return x, f(x) -- additional results
+ f(x); return -- results discarded
+ return x or f(x) -- results adjusted to 1
+</pre>
+
+
+
+
+<h3>3.4.11 &ndash; <a name="3.4.11">Function Definitions</a></h3>
+
+<p>
+The syntax for function definition is
+
+<pre>
+ functiondef ::= <b>function</b> funcbody
+ funcbody ::= &lsquo;<b>(</b>&rsquo; [parlist] &lsquo;<b>)</b>&rsquo; block <b>end</b>
+</pre>
+
+<p>
+The following syntactic sugar simplifies function definitions:
+
+<pre>
+ stat ::= <b>function</b> funcname funcbody
+ stat ::= <b>local</b> <b>function</b> Name funcbody
+ funcname ::= Name {&lsquo;<b>.</b>&rsquo; Name} [&lsquo;<b>:</b>&rsquo; Name]
+</pre><p>
+The statement
+
+<pre>
+ function f () <em>body</em> end
+</pre><p>
+translates to
+
+<pre>
+ f = function () <em>body</em> end
+</pre><p>
+The statement
+
+<pre>
+ function t.a.b.c.f () <em>body</em> end
+</pre><p>
+translates to
+
+<pre>
+ t.a.b.c.f = function () <em>body</em> end
+</pre><p>
+The statement
+
+<pre>
+ local function f () <em>body</em> end
+</pre><p>
+translates to
+
+<pre>
+ local f; f = function () <em>body</em> end
+</pre><p>
+not to
+
+<pre>
+ local f = function () <em>body</em> end
+</pre><p>
+(This only makes a difference when the body of the function
+contains references to <code>f</code>.)
+
+
+<p>
+A function definition is an executable expression,
+whose value has type <em>function</em>.
+When Lua precompiles a chunk,
+all its function bodies are precompiled too.
+Then, whenever Lua executes the function definition,
+the function is <em>instantiated</em> (or <em>closed</em>).
+This function instance (or <em>closure</em>)
+is the final value of the expression.
+
+
+<p>
+Parameters act as local variables that are
+initialized with the argument values:
+
+<pre>
+ parlist ::= namelist [&lsquo;<b>,</b>&rsquo; &lsquo;<b>...</b>&rsquo;] | &lsquo;<b>...</b>&rsquo;
+</pre><p>
+When a function is called,
+the list of arguments is adjusted to
+the length of the list of parameters,
+unless the function is a <em>vararg function</em>,
+which is indicated by three dots ('<code>...</code>')
+at the end of its parameter list.
+A vararg function does not adjust its argument list;
+instead, it collects all extra arguments and supplies them
+to the function through a <em>vararg expression</em>,
+which is also written as three dots.
+The value of this expression is a list of all actual extra arguments,
+similar to a function with multiple results.
+If a vararg expression is used inside another expression
+or in the middle of a list of expressions,
+then its return list is adjusted to one element.
+If the expression is used as the last element of a list of expressions,
+then no adjustment is made
+(unless that last expression is enclosed in parentheses).
+
+
+<p>
+As an example, consider the following definitions:
+
+<pre>
+ function f(a, b) end
+ function g(a, b, ...) end
+ function r() return 1,2,3 end
+</pre><p>
+Then, we have the following mapping from arguments to parameters and
+to the vararg expression:
+
+<pre>
+ CALL PARAMETERS
+
+ f(3) a=3, b=nil
+ f(3, 4) a=3, b=4
+ f(3, 4, 5) a=3, b=4
+ f(r(), 10) a=1, b=10
+ f(r()) a=1, b=2
+
+ g(3) a=3, b=nil, ... --&gt; (nothing)
+ g(3, 4) a=3, b=4, ... --&gt; (nothing)
+ g(3, 4, 5, 8) a=3, b=4, ... --&gt; 5 8
+ g(5, r()) a=5, b=1, ... --&gt; 2 3
+</pre>
+
+<p>
+Results are returned using the <b>return</b> statement (see <a href="#3.3.4">&sect;3.3.4</a>).
+If control reaches the end of a function
+without encountering a <b>return</b> statement,
+then the function returns with no results.
+
+
+<p>
+
+There is a system-dependent limit on the number of values
+that a function may return.
+This limit is guaranteed to be larger than 1000.
+
+
+<p>
+The <em>colon</em> syntax
+is used for defining <em>methods</em>,
+that is, functions that have an implicit extra parameter <code>self</code>.
+Thus, the statement
+
+<pre>
+ function t.a.b.c:f (<em>params</em>) <em>body</em> end
+</pre><p>
+is syntactic sugar for
+
+<pre>
+ t.a.b.c.f = function (self, <em>params</em>) <em>body</em> end
+</pre>
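+
+<p>
+For instance, a minimal sketch of defining and calling a method
+(the table name <code>Account</code> is only illustrative):
+
+<pre>
+     Account = {balance = 0}
+     function Account:deposit (v)    -- 'self' is an implicit parameter
+       self.balance = self.balance + v
+     end
+
+     Account:deposit(100)            -- same as Account.deposit(Account, 100)
+     print(Account.balance)          --&gt; 100
+</pre>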
+
+
+
+
+
+
+<h2>3.5 &ndash; <a name="3.5">Visibility Rules</a></h2>
+
+<p>
+
+Lua is a lexically scoped language.
+The scope of a local variable begins at the first statement after
+its declaration and lasts until the last non-void statement
+of the innermost block that includes the declaration.
+Consider the following example:
+
+<pre>
+ x = 10 -- global variable
+ do -- new block
+ local x = x -- new 'x', with value 10
+ print(x) --&gt; 10
+ x = x+1
+ do -- another block
+ local x = x+1 -- another 'x'
+ print(x) --&gt; 12
+ end
+ print(x) --&gt; 11
+ end
+ print(x) --&gt; 10 (the global one)
+</pre>
+
+<p>
+Notice that, in a declaration like <code>local x = x</code>,
+the new <code>x</code> being declared is not in scope yet,
+and so the second <code>x</code> refers to the outside variable.
+
+
+<p>
+Because of the lexical scoping rules,
+local variables can be freely accessed by functions
+defined inside their scope.
+A local variable used by an inner function is called
+an <em>upvalue</em>, or <em>external local variable</em>,
+inside the inner function.
+
+
+<p>
+Notice that each execution of a <b>local</b> statement
+defines new local variables.
+Consider the following example:
+
+<pre>
+ a = {}
+ local x = 20
+ for i=1,10 do
+ local y = 0
+ a[i] = function () y=y+1; return x+y end
+ end
+</pre><p>
+The loop creates ten closures
+(that is, ten instances of the anonymous function).
+Each of these closures uses a different <code>y</code> variable,
+while all of them share the same <code>x</code>.
+
+
+
+
+
+<h1>4 &ndash; <a name="4">The Application Program Interface</a></h1>
+
+<p>
+
+This section describes the C&nbsp;API for Lua, that is,
+the set of C&nbsp;functions available to the host program to communicate
+with Lua.
+All API functions and related types and constants
+are declared in the header file <a name="pdf-lua.h"><code>lua.h</code></a>.
+
+
+<p>
+Even when we use the term "function",
+any facility in the API may be provided as a macro instead.
+Except where stated otherwise,
+all such macros use each of their arguments exactly once
+(except for the first argument, which is always a Lua state),
+and so do not generate any hidden side-effects.
+
+
+<p>
+As in most C&nbsp;libraries,
+the Lua API functions do not check their arguments for validity or consistency.
+However, you can change this behavior by compiling Lua
+with the macro <a name="pdf-LUA_USE_APICHECK"><code>LUA_USE_APICHECK</code></a> defined.
+
+
+<p>
+The Lua library is fully reentrant:
+it has no global variables.
+It keeps all information it needs in a dynamic structure,
+called the <em>Lua state</em>.
+
+
+<p>
+Each Lua state has one or more threads,
+which correspond to independent, cooperative lines of execution.
+The type <a href="#lua_State"><code>lua_State</code></a> (despite its name) refers to a thread.
+(Indirectly, through the thread, it also refers to the
+Lua state associated to the thread.)
+
+
+<p>
+A pointer to a thread must be passed as the first argument to
+every function in the library, except to <a href="#lua_newstate"><code>lua_newstate</code></a>,
+which creates a Lua state from scratch and returns a pointer
+to the <em>main thread</em> in the new state.
+
+
+
+<h2>4.1 &ndash; <a name="4.1">The Stack</a></h2>
+
+<p>
+Lua uses a <em>virtual stack</em> to pass values to and from C.
+Each element in this stack represents a Lua value
+(<b>nil</b>, number, string, etc.).
+Functions in the API can access this stack through the
+Lua state parameter that they receive.
+
+
+<p>
+Whenever Lua calls C, the called function gets a new stack,
+which is independent of previous stacks and of stacks of
+C&nbsp;functions that are still active.
+This stack initially contains any arguments to the C&nbsp;function
+and it is where the C&nbsp;function can store temporary
+Lua values and must push its results
+to be returned to the caller (see <a href="#lua_CFunction"><code>lua_CFunction</code></a>).
+
+
+<p>
+For convenience,
+most query operations in the API do not follow a strict stack discipline.
+Instead, they can refer to any element in the stack
+by using an <em>index</em>:
+A positive index represents an absolute stack position
+(starting at&nbsp;1);
+a negative index represents an offset relative to the top of the stack.
+More specifically, if the stack has <em>n</em> elements,
+then index&nbsp;1 represents the first element
+(that is, the element that was pushed onto the stack first)
+and
+index&nbsp;<em>n</em> represents the last element;
+index&nbsp;-1 also represents the last element
+(that is, the element at the&nbsp;top)
+and index <em>-n</em> represents the first element.
+
+
+
+
+
+<h2>4.2 &ndash; <a name="4.2">Stack Size</a></h2>
+
+<p>
+When you interact with the Lua API,
+you are responsible for ensuring consistency.
+In particular,
+<em>you are responsible for controlling stack overflow</em>.
+You can use the function <a href="#lua_checkstack"><code>lua_checkstack</code></a>
+to ensure that the stack has enough space for pushing new elements.
+
+
+<p>
+Whenever Lua calls C,
+it ensures that the stack has space for
+at least <a name="pdf-LUA_MINSTACK"><code>LUA_MINSTACK</code></a> extra slots.
+<code>LUA_MINSTACK</code> is defined as 20,
+so that usually you do not have to worry about stack space
+unless your code has loops pushing elements onto the stack.
+
+
+<p>
+When you call a Lua function
+without a fixed number of results (see <a href="#lua_call"><code>lua_call</code></a>),
+Lua ensures that the stack has enough space for all results,
+but it does not ensure any extra space.
+So, before pushing anything in the stack after such a call
+you should use <a href="#lua_checkstack"><code>lua_checkstack</code></a>.
+
+
+
+
+
+<h2>4.3 &ndash; <a name="4.3">Valid and Acceptable Indices</a></h2>
+
+<p>
+Any function in the API that receives stack indices
+works only with <em>valid indices</em> or <em>acceptable indices</em>.
+
+
+<p>
+A <em>valid index</em> is an index that refers to a
+position that stores a modifiable Lua value.
+It comprises stack indices between&nbsp;1 and the stack top
+(<code>1 &le; abs(index) &le; top</code>)
+
+plus <em>pseudo-indices</em>,
+which represent some positions that are accessible to C&nbsp;code
+but that are not in the stack.
+Pseudo-indices are used to access the registry (see <a href="#4.5">&sect;4.5</a>)
+and the upvalues of a C&nbsp;function (see <a href="#4.4">&sect;4.4</a>).
+
+
+<p>
+Functions that do not need a specific mutable position,
+but only a value (e.g., query functions),
+can be called with acceptable indices.
+An <em>acceptable index</em> can be any valid index,
+but it also can be any positive index after the stack top
+within the space allocated for the stack,
+that is, indices up to the stack size.
+(Note that 0 is never an acceptable index.)
+Except when noted otherwise,
+functions in the API work with acceptable indices.
+
+
+<p>
+Acceptable indices serve to avoid extra tests
+against the stack top when querying the stack.
+For instance, a C&nbsp;function can query its third argument
+without the need to first check whether there is a third argument,
+that is, without the need to check whether 3 is a valid index.
+
+
+<p>
+For functions that can be called with acceptable indices,
+any non-valid index is treated as if it
+contains a value of a virtual type <a name="pdf-LUA_TNONE"><code>LUA_TNONE</code></a>,
+which behaves like a nil value.
+
+
+
+
+
+<h2>4.4 &ndash; <a name="4.4">C Closures</a></h2>
+
+<p>
+When a C&nbsp;function is created,
+it is possible to associate some values with it,
+thus creating a <em>C&nbsp;closure</em>
+(see <a href="#lua_pushcclosure"><code>lua_pushcclosure</code></a>);
+these values are called <em>upvalues</em> and are
+accessible to the function whenever it is called.
+
+
+<p>
+Whenever a C&nbsp;function is called,
+its upvalues are located at specific pseudo-indices.
+These pseudo-indices are produced by the macro
+<a href="#lua_upvalueindex"><code>lua_upvalueindex</code></a>.
+The first upvalue associated with a function is at index
+<code>lua_upvalueindex(1)</code>, and so on.
+Any access to <code>lua_upvalueindex(<em>n</em>)</code>,
+where <em>n</em> is greater than the number of upvalues of the
+current function
+(but not greater than 256,
+which is one plus the maximum number of upvalues in a closure),
+produces an acceptable but invalid index.
+
+
+
+
+
+<h2>4.5 &ndash; <a name="4.5">Registry</a></h2>
+
+<p>
+Lua provides a <em>registry</em>,
+a predefined table that can be used by any C&nbsp;code to
+store whatever Lua values it needs to store.
+The registry table is always located at pseudo-index
+<a name="pdf-LUA_REGISTRYINDEX"><code>LUA_REGISTRYINDEX</code></a>.
+Any C&nbsp;library can store data into this table,
+but it must take care to choose keys
+that are different from those used
+by other libraries, to avoid collisions.
+Typically, you should use as key a string containing your library name,
+or a light userdata with the address of a C&nbsp;object in your code,
+or any Lua object created by your code.
+As with variable names,
+string keys starting with an underscore followed by
+uppercase letters are reserved for Lua.
+
+
+<p>
+The integer keys in the registry are used
+by the reference mechanism (see <a href="#luaL_ref"><code>luaL_ref</code></a>)
+and by some predefined values.
+Therefore, integer keys must not be used for other purposes.
+
+
+<p>
+When you create a new Lua state,
+its registry comes with some predefined values.
+These predefined values are indexed with integer keys
+defined as constants in <code>lua.h</code>.
+The following constants are defined:
+
+<ul>
+<li><b><a name="pdf-LUA_RIDX_MAINTHREAD"><code>LUA_RIDX_MAINTHREAD</code></a>: </b> At this index the registry has
+the main thread of the state.
+(The main thread is the one created together with the state.)
+</li>
+
+<li><b><a name="pdf-LUA_RIDX_GLOBALS"><code>LUA_RIDX_GLOBALS</code></a>: </b> At this index the registry has
+the global environment.
+</li>
+</ul>
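+
+<p>
+For instance, a C&nbsp;library could keep its private data in the registry
+under a string key
+(the key <code>"mylib.config"</code> is merely illustrative):
+
+<pre>
+     /* store a table in the registry */
+     lua_newtable(L);
+     lua_setfield(L, LUA_REGISTRYINDEX, "mylib.config");
+
+     /* later, retrieve it again */
+     lua_getfield(L, LUA_REGISTRYINDEX, "mylib.config");
+</pre>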
+
+
+
+
+<h2>4.6 &ndash; <a name="4.6">Error Handling in C</a></h2>
+
+<p>
+Internally, Lua uses the C <code>longjmp</code> facility to handle errors.
+(Lua will use exceptions if you compile it as C++;
+search for <code>LUAI_THROW</code> in the source code for details.)
+When Lua faces any error
+(such as a memory allocation error or a type error)
+it <em>raises</em> an error;
+that is, it does a long jump.
+A <em>protected environment</em> uses <code>setjmp</code>
+to set a recovery point;
+any error jumps to the most recent active recovery point.
+
+
+<p>
+Inside a C&nbsp;function you can raise an error by calling <a href="#lua_error"><code>lua_error</code></a>.
+
+
+<p>
+Most functions in the API can raise an error,
+for instance due to a memory allocation error.
+The documentation for each function indicates whether
+it can raise errors.
+
+
+<p>
+If an error happens outside any protected environment,
+Lua calls a <em>panic function</em> (see <a href="#lua_atpanic"><code>lua_atpanic</code></a>)
+and then calls <code>abort</code>,
+thus exiting the host application.
+Your panic function can avoid this exit by
+never returning
+(e.g., doing a long jump to your own recovery point outside Lua).
+
+
+<p>
+The panic function,
+as its name implies,
+is a mechanism of last resort.
+Programs should avoid it.
+As a general rule,
+when a C&nbsp;function is called by Lua with a Lua state,
+it can do whatever it wants on that Lua state,
+as it should be already protected.
+However,
+when C code operates on other Lua states
+(e.g., a Lua parameter to the function,
+a Lua state stored in the registry, or
+the result of <a href="#lua_newthread"><code>lua_newthread</code></a>),
+it should use them only in API calls that cannot raise errors.
+
+
+<p>
+The panic function runs as if it were a message handler (see <a href="#2.3">&sect;2.3</a>);
+in particular, the error object is at the top of the stack.
+However, there is no guarantee about stack space.
+To push anything on the stack,
+the panic function must first check the available space (see <a href="#4.2">&sect;4.2</a>).
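+
+
+<p>
+The following sketch shows one way to install such a panic function;
+the names <code>recovery</code>, <code>my_panic</code>,
+and the global <code>main</code> are merely illustrative:
+
+<pre>
+     #include &lt;setjmp.h&gt;
+     #include &lt;stdio.h&gt;
+
+     static jmp_buf recovery;
+
+     static int my_panic (lua_State *L) {
+       const char *msg = lua_tostring(L, -1);  /* error object is on the top */
+       fprintf(stderr, "unprotected error: %s\n",
+               msg ? msg : "(error object is not a string)");
+       longjmp(recovery, 1);  /* jump to the recovery point; never return */
+       return 0;              /* unreachable */
+     }
+
+     static void run_unprotected (lua_State *L) {
+       lua_atpanic(L, my_panic);
+       if (setjmp(recovery) == 0) {
+         lua_getglobal(L, "main");
+         lua_call(L, 0, 0);   /* any error here ends up in 'my_panic' */
+       }
+       /* after a panic, control resumes here */
+     }
+</pre>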
+
+
+
+
+
+<h2>4.7 &ndash; <a name="4.7">Handling Yields in C</a></h2>
+
+<p>
+Internally, Lua uses the C <code>longjmp</code> facility to yield a coroutine.
+Therefore, if a C&nbsp;function <code>foo</code> calls an API function
+and this API function yields
+(directly or indirectly by calling another function that yields),
+Lua cannot return to <code>foo</code> any more,
+because the <code>longjmp</code> removes its frame from the C stack.
+
+
+<p>
+To avoid this kind of problem,
+Lua raises an error whenever it tries to yield across an API call,
+except for three functions:
+<a href="#lua_yieldk"><code>lua_yieldk</code></a>, <a href="#lua_callk"><code>lua_callk</code></a>, and <a href="#lua_pcallk"><code>lua_pcallk</code></a>.
+All those functions receive a <em>continuation function</em>
+(as a parameter named <code>k</code>) to continue execution after a yield.
+
+
+<p>
+We need to set some terminology to explain continuations.
+We have a C&nbsp;function called from Lua which we will call
+the <em>original function</em>.
+This original function then calls one of those three functions in the C API,
+which we will call the <em>callee function</em>,
+that then yields the current thread.
+(This can happen when the callee function is <a href="#lua_yieldk"><code>lua_yieldk</code></a>,
+or when the callee function is either <a href="#lua_callk"><code>lua_callk</code></a> or <a href="#lua_pcallk"><code>lua_pcallk</code></a>
+and the function called by them yields.)
+
+
+<p>
+Suppose the running thread yields while executing the callee function.
+After the thread resumes,
+it eventually will finish running the callee function.
+However,
+the callee function cannot return to the original function,
+because its frame in the C stack was destroyed by the yield.
+Instead, Lua calls a <em>continuation function</em>,
+which was given as an argument to the callee function.
+As the name implies,
+the continuation function should continue the task
+of the original function.
+
+
+<p>
+As an illustration, consider the following function:
+
+<pre>
+ int original_function (lua_State *L) {
+ ... /* code 1 */
+ status = lua_pcall(L, n, m, h); /* calls Lua */
+ ... /* code 2 */
+ }
+</pre><p>
+Now we want to allow
+the Lua code being run by <a href="#lua_pcall"><code>lua_pcall</code></a> to yield.
+First, we can rewrite our function like this:
+
+<pre>
+ int k (lua_State *L, int status, lua_KContext ctx) {
+ ... /* code 2 */
+ }
+
+ int original_function (lua_State *L) {
+ ... /* code 1 */
+ return k(L, lua_pcall(L, n, m, h), ctx);
+ }
+</pre><p>
+In the above code,
+the new function <code>k</code> is a
+<em>continuation function</em> (with type <a href="#lua_KFunction"><code>lua_KFunction</code></a>),
+which should do all the work that the original function
+was doing after calling <a href="#lua_pcall"><code>lua_pcall</code></a>.
+Now, we must inform Lua that it must call <code>k</code> if the Lua code
+being executed by <a href="#lua_pcall"><code>lua_pcall</code></a> gets interrupted in some way
+(errors or yielding),
+so we rewrite the code as follows,
+replacing <a href="#lua_pcall"><code>lua_pcall</code></a> by <a href="#lua_pcallk"><code>lua_pcallk</code></a>:
+
+<pre>
+ int original_function (lua_State *L) {
+ ... /* code 1 */
+ return k(L, lua_pcallk(L, n, m, h, ctx2, k), ctx1);
+ }
+</pre><p>
+Note the external, explicit call to the continuation:
+Lua will call the continuation only if needed, that is,
+in case of errors or resuming after a yield.
+If the called function returns normally without ever yielding,
+<a href="#lua_pcallk"><code>lua_pcallk</code></a> (and <a href="#lua_callk"><code>lua_callk</code></a>) will also return normally.
+(Of course, instead of calling the continuation in that case,
+you can do the equivalent work directly inside the original function.)
+
+
+<p>
+Besides the Lua state,
+the continuation function has two other parameters:
+the final status of the call plus the context value (<code>ctx</code>) that
+was passed originally to <a href="#lua_pcallk"><code>lua_pcallk</code></a>.
+(Lua does not use this context value;
+it only passes this value from the original function to the
+continuation function.)
+For <a href="#lua_pcallk"><code>lua_pcallk</code></a>,
+the status is the same value that would be returned by <a href="#lua_pcallk"><code>lua_pcallk</code></a>,
+except that it is <a href="#pdf-LUA_YIELD"><code>LUA_YIELD</code></a> when being executed after a yield
+(instead of <a href="#pdf-LUA_OK"><code>LUA_OK</code></a>).
+For <a href="#lua_yieldk"><code>lua_yieldk</code></a> and <a href="#lua_callk"><code>lua_callk</code></a>,
+the status is always <a href="#pdf-LUA_YIELD"><code>LUA_YIELD</code></a> when Lua calls the continuation.
+(For these two functions,
+Lua will not call the continuation in case of errors,
+because they do not handle errors.)
+Similarly, when using <a href="#lua_callk"><code>lua_callk</code></a>,
+you should call the continuation function
+with <a href="#pdf-LUA_OK"><code>LUA_OK</code></a> as the status.
+(For <a href="#lua_yieldk"><code>lua_yieldk</code></a>, there is not much point in calling
+the continuation function directly,
+because <a href="#lua_yieldk"><code>lua_yieldk</code></a> usually does not return.)
+
+
+<p>
+Lua treats the continuation function as if it were the original function.
+The continuation function receives the same Lua stack
+from the original function,
+in the same state it would be if the callee function had returned.
+(For instance,
+after a <a href="#lua_callk"><code>lua_callk</code></a> the function and its arguments are
+removed from the stack and replaced by the results from the call.)
+It also has the same upvalues.
+Whatever it returns is handled by Lua as if it were the return
+of the original function.
+
+
+
+
+
+<h2>4.8 &ndash; <a name="4.8">Functions and Types</a></h2>
+
+<p>
+Here we list all functions and types from the C&nbsp;API in
+alphabetical order.
+Each function has an indicator like this:
+<span class="apii">[-o, +p, <em>x</em>]</span>
+
+
+<p>
+The first field, <code>o</code>,
+is how many elements the function pops from the stack.
+The second field, <code>p</code>,
+is how many elements the function pushes onto the stack.
+(Any function always pushes its results after popping its arguments.)
+A field in the form <code>x|y</code> means the function can push (or pop)
+<code>x</code> or <code>y</code> elements,
+depending on the situation;
+a question mark '<code>?</code>' means that
+we cannot know how many elements the function pops/pushes
+by looking only at its arguments
+(e.g., they may depend on what is on the stack).
+The third field, <code>x</code>,
+tells whether the function may raise errors:
+'<code>-</code>' means the function never raises any error;
+'<code>m</code>' means the function may raise out-of-memory errors
+and errors running a <code>__gc</code> metamethod;
+'<code>e</code>' means the function may raise any errors
+(it can run arbitrary Lua code,
+either directly or through metamethods);
+'<code>v</code>' means the function may raise an error on purpose.
+
+
+
+<hr><h3><a name="lua_absindex"><code>lua_absindex</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_absindex (lua_State *L, int idx);</pre>
+
+<p>
+Converts the acceptable index <code>idx</code>
+into an equivalent absolute index
+(that is, one that does not depend on the stack top).
+
+
+
+
+
+<hr><h3><a name="lua_Alloc"><code>lua_Alloc</code></a></h3>
+<pre>typedef void * (*lua_Alloc) (void *ud,
+ void *ptr,
+ size_t osize,
+ size_t nsize);</pre>
+
+<p>
+The type of the memory-allocation function used by Lua states.
+The allocator function must provide a
+functionality similar to <code>realloc</code>,
+but not exactly the same.
+Its arguments are
+<code>ud</code>, an opaque pointer passed to <a href="#lua_newstate"><code>lua_newstate</code></a>;
+<code>ptr</code>, a pointer to the block being allocated/reallocated/freed;
+<code>osize</code>, the original size of the block or some code about what
+is being allocated;
+and <code>nsize</code>, the new size of the block.
+
+
+<p>
+When <code>ptr</code> is not <code>NULL</code>,
+<code>osize</code> is the size of the block pointed by <code>ptr</code>,
+that is, the size given when it was allocated or reallocated.
+
+
+<p>
+When <code>ptr</code> is <code>NULL</code>,
+<code>osize</code> encodes the kind of object that Lua is allocating.
+<code>osize</code> is any of
+<a href="#pdf-LUA_TSTRING"><code>LUA_TSTRING</code></a>, <a href="#pdf-LUA_TTABLE"><code>LUA_TTABLE</code></a>, <a href="#pdf-LUA_TFUNCTION"><code>LUA_TFUNCTION</code></a>,
+<a href="#pdf-LUA_TUSERDATA"><code>LUA_TUSERDATA</code></a>, or <a href="#pdf-LUA_TTHREAD"><code>LUA_TTHREAD</code></a> when (and only when)
+Lua is creating a new object of that type.
+When <code>osize</code> is some other value,
+Lua is allocating memory for something else.
+
+
+<p>
+Lua assumes the following behavior from the allocator function:
+
+
+<p>
+When <code>nsize</code> is zero,
+the allocator must behave like <code>free</code>
+and return <code>NULL</code>.
+
+
+<p>
+When <code>nsize</code> is not zero,
+the allocator must behave like <code>realloc</code>.
+The allocator returns <code>NULL</code>
+if and only if it cannot fulfill the request.
+Lua assumes that the allocator never fails when
+<code>osize &gt;= nsize</code>.
+
+
+<p>
+Here is a simple implementation for the allocator function.
+It is used in the auxiliary library by <a href="#luaL_newstate"><code>luaL_newstate</code></a>.
+
+<pre>
+ static void *l_alloc (void *ud, void *ptr, size_t osize,
+ size_t nsize) {
+ (void)ud; (void)osize; /* not used */
+ if (nsize == 0) {
+ free(ptr);
+ return NULL;
+ }
+ else
+ return realloc(ptr, nsize);
+ }
+</pre><p>
+Note that Standard&nbsp;C ensures
+that <code>free(NULL)</code> has no effect and that
+<code>realloc(NULL,size)</code> is equivalent to <code>malloc(size)</code>.
+This code assumes that <code>realloc</code> does not fail when shrinking a block.
+(Although Standard&nbsp;C does not ensure this behavior,
+it seems to be a safe assumption.)
+
+
+
+
+
+<hr><h3><a name="lua_arith"><code>lua_arith</code></a></h3><p>
+<span class="apii">[-(2|1), +1, <em>e</em>]</span>
+<pre>void lua_arith (lua_State *L, int op);</pre>
+
+<p>
+Performs an arithmetic or bitwise operation over the two values
+(or one, in the case of negations)
+at the top of the stack,
+with the value at the top being the second operand,
+pops these values, and pushes the result of the operation.
+The function follows the semantics of the corresponding Lua operator
+(that is, it may call metamethods).
+
+
+<p>
+The value of <code>op</code> must be one of the following constants:
+
+<ul>
+
+<li><b><a name="pdf-LUA_OPADD"><code>LUA_OPADD</code></a>: </b> performs addition (<code>+</code>)</li>
+<li><b><a name="pdf-LUA_OPSUB"><code>LUA_OPSUB</code></a>: </b> performs subtraction (<code>-</code>)</li>
+<li><b><a name="pdf-LUA_OPMUL"><code>LUA_OPMUL</code></a>: </b> performs multiplication (<code>*</code>)</li>
+<li><b><a name="pdf-LUA_OPDIV"><code>LUA_OPDIV</code></a>: </b> performs float division (<code>/</code>)</li>
+<li><b><a name="pdf-LUA_OPIDIV"><code>LUA_OPIDIV</code></a>: </b> performs floor division (<code>//</code>)</li>
+<li><b><a name="pdf-LUA_OPMOD"><code>LUA_OPMOD</code></a>: </b> performs modulo (<code>%</code>)</li>
+<li><b><a name="pdf-LUA_OPPOW"><code>LUA_OPPOW</code></a>: </b> performs exponentiation (<code>^</code>)</li>
+<li><b><a name="pdf-LUA_OPUNM"><code>LUA_OPUNM</code></a>: </b> performs mathematical negation (unary <code>-</code>)</li>
+<li><b><a name="pdf-LUA_OPBNOT"><code>LUA_OPBNOT</code></a>: </b> performs bitwise NOT (<code>~</code>)</li>
+<li><b><a name="pdf-LUA_OPBAND"><code>LUA_OPBAND</code></a>: </b> performs bitwise AND (<code>&amp;</code>)</li>
+<li><b><a name="pdf-LUA_OPBOR"><code>LUA_OPBOR</code></a>: </b> performs bitwise OR (<code>|</code>)</li>
+<li><b><a name="pdf-LUA_OPBXOR"><code>LUA_OPBXOR</code></a>: </b> performs bitwise exclusive OR (<code>~</code>)</li>
+<li><b><a name="pdf-LUA_OPSHL"><code>LUA_OPSHL</code></a>: </b> performs left shift (<code>&lt;&lt;</code>)</li>
+<li><b><a name="pdf-LUA_OPSHR"><code>LUA_OPSHR</code></a>: </b> performs right shift (<code>&gt;&gt;</code>)</li>
+
+</ul>
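+
+<p>
+For example, the following fragment computes <code>2 + 3</code>
+and leaves the result on the top of the stack:
+
+<pre>
+     lua_pushinteger(L, 2);      /* first operand */
+     lua_pushinteger(L, 3);      /* second operand */
+     lua_arith(L, LUA_OPADD);    /* pops both operands, pushes 5 */
+</pre>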
+
+
+
+
+<hr><h3><a name="lua_atpanic"><code>lua_atpanic</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_CFunction lua_atpanic (lua_State *L, lua_CFunction panicf);</pre>
+
+<p>
+Sets a new panic function and returns the old one (see <a href="#4.6">&sect;4.6</a>).
+
+
+
+
+
+<hr><h3><a name="lua_call"><code>lua_call</code></a></h3><p>
+<span class="apii">[-(nargs+1), +nresults, <em>e</em>]</span>
+<pre>void lua_call (lua_State *L, int nargs, int nresults);</pre>
+
+<p>
+Calls a function.
+
+
+<p>
+To call a function you must use the following protocol:
+first, the function to be called is pushed onto the stack;
+then, the arguments to the function are pushed
+in direct order;
+that is, the first argument is pushed first.
+Finally you call <a href="#lua_call"><code>lua_call</code></a>;
+<code>nargs</code> is the number of arguments that you pushed onto the stack.
+All arguments and the function value are popped from the stack
+when the function is called.
+The function results are pushed onto the stack when the function returns.
+The number of results is adjusted to <code>nresults</code>,
+unless <code>nresults</code> is <a name="pdf-LUA_MULTRET"><code>LUA_MULTRET</code></a>.
+In this case, all results from the function are pushed;
+Lua takes care that the returned values fit into the stack space,
+but it does not ensure any extra space in the stack.
+The function results are pushed onto the stack in direct order
+(the first result is pushed first),
+so that after the call the last result is on the top of the stack.
+
+
+<p>
+Any error inside the called function is propagated upwards
+(with a <code>longjmp</code>).
+
+
+<p>
+The following example shows how the host program can do the
+equivalent to this Lua code:
+
+<pre>
+ a = f("how", t.x, 14)
+</pre><p>
+Here it is in&nbsp;C:
+
+<pre>
+ lua_getglobal(L, "f"); /* function to be called */
+ lua_pushliteral(L, "how"); /* 1st argument */
+ lua_getglobal(L, "t"); /* table to be indexed */
+ lua_getfield(L, -1, "x"); /* push result of t.x (2nd arg) */
+ lua_remove(L, -2); /* remove 't' from the stack */
+ lua_pushinteger(L, 14); /* 3rd argument */
+ lua_call(L, 3, 1); /* call 'f' with 3 arguments and 1 result */
+ lua_setglobal(L, "a"); /* set global 'a' */
+</pre><p>
+Note that the code above is <em>balanced</em>:
+at its end, the stack is back to its original configuration.
+This is considered good programming practice.
+
+
+
+
+
+<hr><h3><a name="lua_callk"><code>lua_callk</code></a></h3><p>
+<span class="apii">[-(nargs + 1), +nresults, <em>e</em>]</span>
+<pre>void lua_callk (lua_State *L,
+ int nargs,
+ int nresults,
+ lua_KContext ctx,
+ lua_KFunction k);</pre>
+
+<p>
+This function behaves exactly like <a href="#lua_call"><code>lua_call</code></a>,
+but allows the called function to yield (see <a href="#4.7">&sect;4.7</a>).
+
+
+
+
+
+<hr><h3><a name="lua_CFunction"><code>lua_CFunction</code></a></h3>
+<pre>typedef int (*lua_CFunction) (lua_State *L);</pre>
+
+<p>
+Type for C&nbsp;functions.
+
+
+<p>
+In order to communicate properly with Lua,
+a C&nbsp;function must use the following protocol,
+which defines the way parameters and results are passed:
+a C&nbsp;function receives its arguments from Lua in its stack
+in direct order (the first argument is pushed first).
+So, when the function starts,
+<code>lua_gettop(L)</code> returns the number of arguments received by the function.
+The first argument (if any) is at index 1
+and its last argument is at index <code>lua_gettop(L)</code>.
+To return values to Lua, a C&nbsp;function just pushes them onto the stack,
+in direct order (the first result is pushed first),
+and returns the number of results.
+Any other value in the stack below the results will be properly
+discarded by Lua.
+Like a Lua function, a C&nbsp;function called by Lua can also return
+many results.
+
+
+<p>
+As an example, the following function receives a variable number
+of numeric arguments and returns their average and their sum:
+
+<pre>
+ static int foo (lua_State *L) {
+ int n = lua_gettop(L); /* number of arguments */
+ lua_Number sum = 0.0;
+ int i;
+ for (i = 1; i &lt;= n; i++) {
+ if (!lua_isnumber(L, i)) {
+ lua_pushliteral(L, "incorrect argument");
+ lua_error(L);
+ }
+ sum += lua_tonumber(L, i);
+ }
+ lua_pushnumber(L, sum/n); /* first result */
+ lua_pushnumber(L, sum); /* second result */
+ return 2; /* number of results */
+ }
+</pre>
+
+
+
+
+<hr><h3><a name="lua_checkstack"><code>lua_checkstack</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_checkstack (lua_State *L, int n);</pre>
+
+<p>
+Ensures that the stack has space for at least <code>n</code> extra slots
+(that is, that you can safely push up to <code>n</code> values into it).
+It returns false if it cannot fulfill the request,
+either because it would cause the stack
+to be larger than a fixed maximum size
+(typically at least several thousand elements) or
+because it cannot allocate memory for the extra space.
+This function never shrinks the stack;
+if the stack already has space for the extra slots,
+it is left unchanged.
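+
+
+<p>
+As a sketch, a function that pushes <code>n</code> values
+could first ensure that there is room for them
+(the name <code>push_range</code> is merely illustrative):
+
+<pre>
+     static int push_range (lua_State *L, int n) {
+       int i;
+       if (!lua_checkstack(L, n))
+         return 0;                 /* could not grow the stack */
+       for (i = 1; i &lt;= n; i++)
+         lua_pushinteger(L, i);
+       return n;                   /* number of pushed values */
+     }
+</pre>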
+
+
+
+
+
+<hr><h3><a name="lua_close"><code>lua_close</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void lua_close (lua_State *L);</pre>
+
+<p>
+Destroys all objects in the given Lua state
+(calling the corresponding garbage-collection metamethods, if any)
+and frees all dynamic memory used by this state.
+On several platforms, you may not need to call this function,
+because all resources are naturally released when the host program ends.
+On the other hand, long-running programs that create multiple states,
+such as daemons or web servers,
+will probably need to close states as soon as they are not needed.
+
+
+
+
+
+<hr><h3><a name="lua_compare"><code>lua_compare</code></a></h3><p>
+<span class="apii">[-0, +0, <em>e</em>]</span>
+<pre>int lua_compare (lua_State *L, int index1, int index2, int op);</pre>
+
+<p>
+Compares two Lua values.
+Returns 1 if the value at index <code>index1</code> satisfies <code>op</code>
+when compared with the value at index <code>index2</code>,
+following the semantics of the corresponding Lua operator
+(that is, it may call metamethods).
+Otherwise returns&nbsp;0.
+Also returns&nbsp;0 if any of the indices is not valid.
+
+
+<p>
+The value of <code>op</code> must be one of the following constants:
+
+<ul>
+
+<li><b><a name="pdf-LUA_OPEQ"><code>LUA_OPEQ</code></a>: </b> compares for equality (<code>==</code>)</li>
+<li><b><a name="pdf-LUA_OPLT"><code>LUA_OPLT</code></a>: </b> compares for less than (<code>&lt;</code>)</li>
+<li><b><a name="pdf-LUA_OPLE"><code>LUA_OPLE</code></a>: </b> compares for less or equal (<code>&lt;=</code>)</li>
+
+</ul>
+
+
+
+
+<hr><h3><a name="lua_concat"><code>lua_concat</code></a></h3><p>
+<span class="apii">[-n, +1, <em>e</em>]</span>
+<pre>void lua_concat (lua_State *L, int n);</pre>
+
+<p>
+Concatenates the <code>n</code> values at the top of the stack,
+pops them, and leaves the result at the top.
+If <code>n</code>&nbsp;is&nbsp;1, the result is the single value on the stack
+(that is, the function does nothing);
+if <code>n</code> is 0, the result is the empty string.
+Concatenation is performed following the usual semantics of Lua
+(see <a href="#3.4.6">&sect;3.4.6</a>).
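+
+
+<p>
+For instance, the following fragment leaves the string
+<code>"value: 10"</code> on the stack
+(the number is converted following the usual concatenation rules):
+
+<pre>
+     lua_pushliteral(L, "value: ");
+     lua_pushinteger(L, 10);
+     lua_concat(L, 2);       /* pops both values, pushes "value: 10" */
+</pre>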
+
+
+
+
+
+<hr><h3><a name="lua_copy"><code>lua_copy</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void lua_copy (lua_State *L, int fromidx, int toidx);</pre>
+
+<p>
+Copies the element at index <code>fromidx</code>
+into the valid index <code>toidx</code>,
+replacing the value at that position.
+Values at other positions are not affected.
+
+
+
+
+
+<hr><h3><a name="lua_createtable"><code>lua_createtable</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>void lua_createtable (lua_State *L, int narr, int nrec);</pre>
+
+<p>
+Creates a new empty table and pushes it onto the stack.
+Parameter <code>narr</code> is a hint for how many elements the table
+will have as a sequence;
+parameter <code>nrec</code> is a hint for how many other elements
+the table will have.
+Lua may use these hints to preallocate memory for the new table.
+This preallocation is useful for performance when you know in advance
+how many elements the table will have.
+Otherwise you can use the function <a href="#lua_newtable"><code>lua_newtable</code></a>.
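+
+
+<p>
+For example, the following fragment builds the sequence
+<code>{10, 20, 30}</code>,
+preallocating its three array slots:
+
+<pre>
+     int i;
+     lua_createtable(L, 3, 0);     /* new table with room for 3 array entries */
+     for (i = 1; i &lt;= 3; i++) {
+       lua_pushinteger(L, 10 * i);
+       lua_rawseti(L, -2, i);      /* table[i] = 10 * i */
+     }
+</pre>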
+
+
+
+
+
+<hr><h3><a name="lua_dump"><code>lua_dump</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_dump (lua_State *L,
+ lua_Writer writer,
+ void *data,
+ int strip);</pre>
+
+<p>
+Dumps a function as a binary chunk.
+Receives a Lua function on the top of the stack
+and produces a binary chunk that,
+if loaded again,
+results in a function equivalent to the one dumped.
+As it produces parts of the chunk,
+<a href="#lua_dump"><code>lua_dump</code></a> calls function <code>writer</code> (see <a href="#lua_Writer"><code>lua_Writer</code></a>)
+with the given <code>data</code>
+to write them.
+
+
+<p>
+If <code>strip</code> is true,
+the binary representation may not include all debug information
+about the function,
+to save space.
+
+
+<p>
+The value returned is the error code returned by the last
+call to the writer;
+0&nbsp;means no errors.
+
+
+<p>
+This function does not pop the Lua function from the stack.
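+
+
+<p>
+As a sketch, the following writer appends each piece to a C&nbsp;file
+(the names <code>file_writer</code> and <code>"chunk.out"</code>
+are merely illustrative):
+
+<pre>
+     #include &lt;stdio.h&gt;
+
+     static int file_writer (lua_State *L, const void *p, size_t sz, void *ud) {
+       (void)L;  /* not used */
+       return (fwrite(p, 1, sz, (FILE *)ud) != sz);  /* non-zero signals an error */
+     }
+
+     /* with a Lua function on the top of the stack: */
+     FILE *f = fopen("chunk.out", "wb");
+     lua_dump(L, file_writer, f, 0);   /* 0: keep debug information */
+     fclose(f);
+</pre>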
+
+
+
+
+
+<hr><h3><a name="lua_error"><code>lua_error</code></a></h3><p>
+<span class="apii">[-1, +0, <em>v</em>]</span>
+<pre>int lua_error (lua_State *L);</pre>
+
+<p>
+Generates a Lua error,
+using the value at the top of the stack as the error object.
+This function does a long jump,
+and therefore never returns
+(see <a href="#luaL_error"><code>luaL_error</code></a>).
+
+
+
+
+
+<hr><h3><a name="lua_gc"><code>lua_gc</code></a></h3><p>
+<span class="apii">[-0, +0, <em>m</em>]</span>
+<pre>int lua_gc (lua_State *L, int what, int data);</pre>
+
+<p>
+Controls the garbage collector.
+
+
+<p>
+This function performs several tasks,
+according to the value of the parameter <code>what</code>:
+
+<ul>
+
+<li><b><code>LUA_GCSTOP</code>: </b>
+stops the garbage collector.
+</li>
+
+<li><b><code>LUA_GCRESTART</code>: </b>
+restarts the garbage collector.
+</li>
+
+<li><b><code>LUA_GCCOLLECT</code>: </b>
+performs a full garbage-collection cycle.
+</li>
+
+<li><b><code>LUA_GCCOUNT</code>: </b>
+returns the current amount of memory (in Kbytes) in use by Lua.
+</li>
+
+<li><b><code>LUA_GCCOUNTB</code>: </b>
+returns the remainder of dividing the current amount of bytes of
+memory in use by Lua by 1024.
+</li>
+
+<li><b><code>LUA_GCSTEP</code>: </b>
+performs an incremental step of garbage collection.
+</li>
+
+<li><b><code>LUA_GCSETPAUSE</code>: </b>
+sets <code>data</code> as the new value
+for the <em>pause</em> of the collector (see <a href="#2.5">&sect;2.5</a>)
+and returns the previous value of the pause.
+</li>
+
+<li><b><code>LUA_GCSETSTEPMUL</code>: </b>
+sets <code>data</code> as the new value for the <em>step multiplier</em> of
+the collector (see <a href="#2.5">&sect;2.5</a>)
+and returns the previous value of the step multiplier.
+</li>
+
+<li><b><code>LUA_GCISRUNNING</code>: </b>
+returns a boolean that tells whether the collector is running
+(i.e., not stopped).
+</li>
+
+</ul>
+
+<p>
+For more details about these options,
+see <a href="#pdf-collectgarbage"><code>collectgarbage</code></a>.
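+
+
+<p>
+For instance, the following fragment reports the total memory
+in use by Lua, in bytes:
+
+<pre>
+     int kbytes = lua_gc(L, LUA_GCCOUNT, 0);
+     int bytes = lua_gc(L, LUA_GCCOUNTB, 0);
+     printf("in use: %d bytes\n", kbytes * 1024 + bytes);
+</pre>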
+
+
+
+
+
+<hr><h3><a name="lua_getallocf"><code>lua_getallocf</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_Alloc lua_getallocf (lua_State *L, void **ud);</pre>
+
+<p>
+Returns the memory-allocation function of a given state.
+If <code>ud</code> is not <code>NULL</code>, Lua stores in <code>*ud</code> the
+opaque pointer given when the memory-allocator function was set.
+
+
+
+
+
+<hr><h3><a name="lua_getfield"><code>lua_getfield</code></a></h3><p>
+<span class="apii">[-0, +1, <em>e</em>]</span>
+<pre>int lua_getfield (lua_State *L, int index, const char *k);</pre>
+
+<p>
+Pushes onto the stack the value <code>t[k]</code>,
+where <code>t</code> is the value at the given index.
+As in Lua, this function may trigger a metamethod
+for the "index" event (see <a href="#2.4">&sect;2.4</a>).
+
+
+<p>
+Returns the type of the pushed value.
+
+
+
+
+
+<hr><h3><a name="lua_getextraspace"><code>lua_getextraspace</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void *lua_getextraspace (lua_State *L);</pre>
+
+<p>
+Returns a pointer to a raw memory area associated with the
+given Lua state.
+The application can use this area for any purpose;
+Lua does not use it for anything.
+
+
+<p>
+Each new thread has this area initialized with a copy
+of the area of the main thread.
+
+
+<p>
+By default, this area has the size of a pointer to void,
+but you can recompile Lua with a different size for this area.
+(See <code>LUA_EXTRASPACE</code> in <code>luaconf.h</code>.)
+
+
+
+
+
+<hr><h3><a name="lua_getglobal"><code>lua_getglobal</code></a></h3><p>
+<span class="apii">[-0, +1, <em>e</em>]</span>
+<pre>int lua_getglobal (lua_State *L, const char *name);</pre>
+
+<p>
+Pushes onto the stack the value of the global <code>name</code>.
+Returns the type of that value.
+
+
+
+
+
+<hr><h3><a name="lua_geti"><code>lua_geti</code></a></h3><p>
+<span class="apii">[-0, +1, <em>e</em>]</span>
+<pre>int lua_geti (lua_State *L, int index, lua_Integer i);</pre>
+
+<p>
+Pushes onto the stack the value <code>t[i]</code>,
+where <code>t</code> is the value at the given index.
+As in Lua, this function may trigger a metamethod
+for the "index" event (see <a href="#2.4">&sect;2.4</a>).
+
+
+<p>
+Returns the type of the pushed value.
+
+
+
+
+
+<hr><h3><a name="lua_getmetatable"><code>lua_getmetatable</code></a></h3><p>
+<span class="apii">[-0, +(0|1), &ndash;]</span>
+<pre>int lua_getmetatable (lua_State *L, int index);</pre>
+
+<p>
+If the value at the given index has a metatable,
+the function pushes that metatable onto the stack and returns&nbsp;1.
+Otherwise,
+the function returns&nbsp;0 and pushes nothing on the stack.
+
+
+
+
+
+<hr><h3><a name="lua_gettable"><code>lua_gettable</code></a></h3><p>
+<span class="apii">[-1, +1, <em>e</em>]</span>
+<pre>int lua_gettable (lua_State *L, int index);</pre>
+
+<p>
+Pushes onto the stack the value <code>t[k]</code>,
+where <code>t</code> is the value at the given index
+and <code>k</code> is the value at the top of the stack.
+
+
+<p>
+This function pops the key from the stack,
+pushing the resulting value in its place.
+As in Lua, this function may trigger a metamethod
+for the "index" event (see <a href="#2.4">&sect;2.4</a>).
+
+
+<p>
+Returns the type of the pushed value.
+
+
+
+
+
+<hr><h3><a name="lua_gettop"><code>lua_gettop</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_gettop (lua_State *L);</pre>
+
+<p>
+Returns the index of the top element in the stack.
+Because indices start at&nbsp;1,
+this result is equal to the number of elements in the stack;
+in particular, 0&nbsp;means an empty stack.
+
+
+
+
+
+<hr><h3><a name="lua_getuservalue"><code>lua_getuservalue</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>int lua_getuservalue (lua_State *L, int index);</pre>
+
+<p>
+Pushes onto the stack the Lua value associated with the full userdata
+at the given index.
+
+
+<p>
+Returns the type of the pushed value.
+
+
+
+
+
+<hr><h3><a name="lua_insert"><code>lua_insert</code></a></h3><p>
+<span class="apii">[-1, +1, &ndash;]</span>
+<pre>void lua_insert (lua_State *L, int index);</pre>
+
+<p>
+Moves the top element into the given valid index,
+shifting up the elements above this index to open space.
+This function cannot be called with a pseudo-index,
+because a pseudo-index is not an actual stack position.
+
+
+
+
+
+<hr><h3><a name="lua_Integer"><code>lua_Integer</code></a></h3>
+<pre>typedef ... lua_Integer;</pre>
+
+<p>
+The type of integers in Lua.
+
+
+<p>
+By default this type is <code>long long</code>
+(usually a 64-bit two's-complement integer),
+but that can be changed to <code>long</code> or <code>int</code>
+(usually a 32-bit two's-complement integer).
+(See <code>LUA_INT_TYPE</code> in <code>luaconf.h</code>.)
+
+
+<p>
+Lua also defines the constants
+<a name="pdf-LUA_MININTEGER"><code>LUA_MININTEGER</code></a> and <a name="pdf-LUA_MAXINTEGER"><code>LUA_MAXINTEGER</code></a>,
+with the minimum and the maximum values that fit in this type.
+
+
+
+
+
+<hr><h3><a name="lua_isboolean"><code>lua_isboolean</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isboolean (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is a boolean,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_iscfunction"><code>lua_iscfunction</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_iscfunction (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is a C&nbsp;function,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isfunction"><code>lua_isfunction</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isfunction (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is a function
+(either C or Lua), and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isinteger"><code>lua_isinteger</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isinteger (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is an integer
+(that is, the value is a number and is represented as an integer),
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_islightuserdata"><code>lua_islightuserdata</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_islightuserdata (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is a light userdata,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isnil"><code>lua_isnil</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isnil (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is <b>nil</b>,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isnone"><code>lua_isnone</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isnone (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the given index is not valid,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isnoneornil"><code>lua_isnoneornil</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isnoneornil (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the given index is not valid
+or if the value at this index is <b>nil</b>,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isnumber"><code>lua_isnumber</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isnumber (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is a number
+or a string convertible to a number,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isstring"><code>lua_isstring</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isstring (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is a string
+or a number (which is always convertible to a string),
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_istable"><code>lua_istable</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_istable (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is a table,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isthread"><code>lua_isthread</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isthread (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is a thread,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isuserdata"><code>lua_isuserdata</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isuserdata (lua_State *L, int index);</pre>
+
+<p>
+Returns 1 if the value at the given index is a userdata
+(either full or light), and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_isyieldable"><code>lua_isyieldable</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_isyieldable (lua_State *L);</pre>
+
+<p>
+Returns 1 if the given coroutine can yield,
+and 0&nbsp;otherwise.
+
+
+
+
+
+<hr><h3><a name="lua_KContext"><code>lua_KContext</code></a></h3>
+<pre>typedef ... lua_KContext;</pre>
+
+<p>
+The type for continuation-function contexts.
+It must be a numeric type.
+This type is defined as <code>intptr_t</code>
+when <code>intptr_t</code> is available,
+so that it can store pointers too.
+Otherwise, it is defined as <code>ptrdiff_t</code>.
+
+
+
+
+
+<hr><h3><a name="lua_KFunction"><code>lua_KFunction</code></a></h3>
+<pre>typedef int (*lua_KFunction) (lua_State *L, int status, lua_KContext ctx);</pre>
+
+<p>
+Type for continuation functions (see <a href="#4.7">&sect;4.7</a>).
+
+
+
+
+
+<hr><h3><a name="lua_len"><code>lua_len</code></a></h3><p>
+<span class="apii">[-0, +1, <em>e</em>]</span>
+<pre>void lua_len (lua_State *L, int index);</pre>
+
+<p>
+Returns the length of the value at the given index.
+It is equivalent to the '<code>#</code>' operator in Lua (see <a href="#3.4.7">&sect;3.4.7</a>) and
+may trigger a metamethod for the "length" event (see <a href="#2.4">&sect;2.4</a>).
+The result is pushed on the stack.
+
+
+
+
+
+<hr><h3><a name="lua_load"><code>lua_load</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>int lua_load (lua_State *L,
+ lua_Reader reader,
+ void *data,
+ const char *chunkname,
+ const char *mode);</pre>
+
+<p>
+Loads a Lua chunk without running it.
+If there are no errors,
+<code>lua_load</code> pushes the compiled chunk as a Lua
+function on top of the stack.
+Otherwise, it pushes an error message.
+
+
+<p>
+The return values of <code>lua_load</code> are:
+
+<ul>
+
+<li><b><a href="#pdf-LUA_OK"><code>LUA_OK</code></a>: </b> no errors;</li>
+
+<li><b><a name="pdf-LUA_ERRSYNTAX"><code>LUA_ERRSYNTAX</code></a>: </b>
+syntax error during precompilation;</li>
+
+<li><b><a href="#pdf-LUA_ERRMEM"><code>LUA_ERRMEM</code></a>: </b>
+memory allocation (out-of-memory) error;</li>
+
+<li><b><a href="#pdf-LUA_ERRGCMM"><code>LUA_ERRGCMM</code></a>: </b>
+error while running a <code>__gc</code> metamethod.
+(This error has no relation with the chunk being loaded.
+It is generated by the garbage collector.)
+</li>
+
+</ul>
+
+<p>
+The <code>lua_load</code> function uses a user-supplied <code>reader</code> function
+to read the chunk (see <a href="#lua_Reader"><code>lua_Reader</code></a>).
+The <code>data</code> argument is an opaque value passed to the reader function.
+
+
+<p>
+The <code>chunkname</code> argument gives a name to the chunk,
+which is used for error messages and in debug information (see <a href="#4.9">&sect;4.9</a>).
+
+
+<p>
+<code>lua_load</code> automatically detects whether the chunk is text or binary
+and loads it accordingly (see program <code>luac</code>).
+The string <code>mode</code> works as in function <a href="#pdf-load"><code>load</code></a>,
+with the addition that
+a <code>NULL</code> value is equivalent to the string "<code>bt</code>".
+
+
+<p>
+<code>lua_load</code> uses the stack internally,
+so the reader function must always leave the stack
+unmodified when returning.
+
+
+<p>
+If the resulting function has upvalues,
+its first upvalue is set to the value of the global environment
+stored at index <code>LUA_RIDX_GLOBALS</code> in the registry (see <a href="#4.5">&sect;4.5</a>).
+When loading main chunks,
+this upvalue will be the <code>_ENV</code> variable (see <a href="#2.2">&sect;2.2</a>).
+Other upvalues are initialized with <b>nil</b>.
+
+
+
+
+
+<hr><h3><a name="lua_newstate"><code>lua_newstate</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_State *lua_newstate (lua_Alloc f, void *ud);</pre>
+
+<p>
+Creates a new thread running in a new, independent state.
+Returns <code>NULL</code> if it cannot create the thread or the state
+(due to lack of memory).
+The argument <code>f</code> is the allocator function;
+Lua does all memory allocation for this state
+through this function (see <a href="#lua_Alloc"><code>lua_Alloc</code></a>).
+The second argument, <code>ud</code>, is an opaque pointer that Lua
+passes to the allocator in every call.
+
+
+
+
+
+<hr><h3><a name="lua_newtable"><code>lua_newtable</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>void lua_newtable (lua_State *L);</pre>
+
+<p>
+Creates a new empty table and pushes it onto the stack.
+It is equivalent to <code>lua_createtable(L, 0, 0)</code>.
+
+
+
+
+
+<hr><h3><a name="lua_newthread"><code>lua_newthread</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>lua_State *lua_newthread (lua_State *L);</pre>
+
+<p>
+Creates a new thread, pushes it on the stack,
+and returns a pointer to a <a href="#lua_State"><code>lua_State</code></a> that represents this new thread.
+The new thread returned by this function shares with the original thread
+its global environment,
+but has an independent execution stack.
+
+
+<p>
+There is no explicit function to close or to destroy a thread.
+Threads are subject to garbage collection,
+like any Lua object.
+
+
+
+
+
+<hr><h3><a name="lua_newuserdata"><code>lua_newuserdata</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>void *lua_newuserdata (lua_State *L, size_t size);</pre>
+
+<p>
+This function allocates a new block of memory with the given size,
+pushes onto the stack a new full userdata with the block address,
+and returns this address.
+The host program can freely use this memory.
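+
+
+<p>
+For example, a full userdata can hold a C&nbsp;structure
+(the type <code>Point</code> is merely illustrative):
+
+<pre>
+     typedef struct Point { double x, y; } Point;
+
+     Point *p = (Point *)lua_newuserdata(L, sizeof(Point));
+     p-&gt;x = 0.0;
+     p-&gt;y = 0.0;
+</pre>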
+
+
+
+
+
+<hr><h3><a name="lua_next"><code>lua_next</code></a></h3><p>
+<span class="apii">[-1, +(2|0), <em>e</em>]</span>
+<pre>int lua_next (lua_State *L, int index);</pre>
+
+<p>
+Pops a key from the stack,
+and pushes a key&ndash;value pair from the table at the given index
+(the "next" pair after the given key).
+If there are no more elements in the table,
+then <a href="#lua_next"><code>lua_next</code></a> returns 0 (and pushes nothing).
+
+
+<p>
+A typical traversal looks like this:
+
+<pre>
+ /* table is in the stack at index 't' */
+ lua_pushnil(L); /* first key */
+ while (lua_next(L, t) != 0) {
+ /* uses 'key' (at index -2) and 'value' (at index -1) */
+ printf("%s - %s\n",
+ lua_typename(L, lua_type(L, -2)),
+ lua_typename(L, lua_type(L, -1)));
+ /* removes 'value'; keeps 'key' for next iteration */
+ lua_pop(L, 1);
+ }
+</pre>
+
+<p>
+While traversing a table,
+do not call <a href="#lua_tolstring"><code>lua_tolstring</code></a> directly on a key,
+unless you know that the key is actually a string.
+Recall that <a href="#lua_tolstring"><code>lua_tolstring</code></a> may change
+the value at the given index;
+this confuses the next call to <a href="#lua_next"><code>lua_next</code></a>.
+
+
+<p>
+See function <a href="#pdf-next"><code>next</code></a> for the caveats of modifying
+the table during its traversal.
+
+
+
+
+
+<hr><h3><a name="lua_Number"><code>lua_Number</code></a></h3>
+<pre>typedef ... lua_Number;</pre>
+
+<p>
+The type of floats in Lua.
+
+
+<p>
+By default this type is double,
+but that can be changed to a single float or a long double.
+(See <code>LUA_FLOAT_TYPE</code> in <code>luaconf.h</code>.)
+
+
+
+
+
+<hr><h3><a name="lua_numbertointeger"><code>lua_numbertointeger</code></a></h3>
+<pre>int lua_numbertointeger (lua_Number n, lua_Integer *p);</pre>
+
+<p>
+Converts a Lua float to a Lua integer.
+This macro assumes that <code>n</code> has an integral value.
+If that value is within the range of Lua integers,
+it is converted to an integer and assigned to <code>*p</code>.
+The macro results in a boolean indicating whether the
+conversion was successful.
+(Note that this range test can be tricky to do
+correctly without this macro,
+due to rounding.)
+
+
+<p>
+This macro may evaluate its arguments more than once.
+
+
+
+
+
+<hr><h3><a name="lua_pcall"><code>lua_pcall</code></a></h3><p>
+<span class="apii">[-(nargs + 1), +(nresults|1), &ndash;]</span>
+<pre>int lua_pcall (lua_State *L, int nargs, int nresults, int msgh);</pre>
+
+<p>
+Calls a function in protected mode.
+
+
+<p>
+Both <code>nargs</code> and <code>nresults</code> have the same meaning as
+in <a href="#lua_call"><code>lua_call</code></a>.
+If there are no errors during the call,
+<a href="#lua_pcall"><code>lua_pcall</code></a> behaves exactly like <a href="#lua_call"><code>lua_call</code></a>.
+However, if there is any error,
+<a href="#lua_pcall"><code>lua_pcall</code></a> catches it,
+pushes a single value on the stack (the error object),
+and returns an error code.
+Like <a href="#lua_call"><code>lua_call</code></a>,
+<a href="#lua_pcall"><code>lua_pcall</code></a> always removes the function
+and its arguments from the stack.
+
+
+<p>
+If <code>msgh</code> is 0,
+then the error object returned on the stack
+is exactly the original error object.
+Otherwise, <code>msgh</code> is the stack index of a
+<em>message handler</em>.
+(This index cannot be a pseudo-index.)
+In case of runtime errors,
+this function will be called with the error object
+and its return value will be the object
+returned on the stack by <a href="#lua_pcall"><code>lua_pcall</code></a>.
+
+
+<p>
+Typically, the message handler is used to add more debug
+information to the error object, such as a stack traceback.
+Such information cannot be gathered after the return of <a href="#lua_pcall"><code>lua_pcall</code></a>,
+since by then the stack has unwound.
+
+
+<p>
+The <a href="#lua_pcall"><code>lua_pcall</code></a> function returns one of the following constants
+(defined in <code>lua.h</code>):
+
+<ul>
+
+<li><b><a name="pdf-LUA_OK"><code>LUA_OK</code></a> (0): </b>
+success.</li>
+
+<li><b><a name="pdf-LUA_ERRRUN"><code>LUA_ERRRUN</code></a>: </b>
+a runtime error.
+</li>
+
+<li><b><a name="pdf-LUA_ERRMEM"><code>LUA_ERRMEM</code></a>: </b>
+memory allocation error.
+For such errors, Lua does not call the message handler.
+</li>
+
+<li><b><a name="pdf-LUA_ERRERR"><code>LUA_ERRERR</code></a>: </b>
+error while running the message handler.
+</li>
+
+<li><b><a name="pdf-LUA_ERRGCMM"><code>LUA_ERRGCMM</code></a>: </b>
+error while running a <code>__gc</code> metamethod.
+For such errors, Lua does not call the message handler
+(as this kind of error typically has no relation
+with the function being called).
+</li>
+
+</ul>
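+
+<p>
+As a sketch, the following fragment calls a global function <code>f</code>
+in protected mode with a message handler that marks string error objects
+(the names <code>msg_handler</code> and <code>f</code> are merely illustrative):
+
+<pre>
+     static int msg_handler (lua_State *L) {
+       if (lua_isstring(L, 1)) {       /* error object is a string? */
+         lua_pushliteral(L, " (caught)");
+         lua_concat(L, 2);             /* append a mark to it */
+       }
+       return 1;                       /* return the error object */
+     }
+
+     /* somewhere in the host program: */
+     int base;
+     lua_pushcfunction(L, msg_handler);   /* push message handler */
+     base = lua_gettop(L);                /* its stack index */
+     lua_getglobal(L, "f");               /* function to be called */
+     if (lua_pcall(L, 0, 0, base) != LUA_OK) {
+       /* assumes a string error object */
+       fprintf(stderr, "%s\n", lua_tostring(L, -1));
+       lua_pop(L, 1);                     /* remove error object */
+     }
+     lua_remove(L, base);                 /* remove message handler */
+</pre>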
+
+
+
+
+<hr><h3><a name="lua_pcallk"><code>lua_pcallk</code></a></h3><p>
+<span class="apii">[-(nargs + 1), +(nresults|1), &ndash;]</span>
+<pre>int lua_pcallk (lua_State *L,
+ int nargs,
+ int nresults,
+ int msgh,
+ lua_KContext ctx,
+ lua_KFunction k);</pre>
+
+<p>
+This function behaves exactly like <a href="#lua_pcall"><code>lua_pcall</code></a>,
+but allows the called function to yield (see <a href="#4.7">&sect;4.7</a>).
+
+
+
+
+
+<hr><h3><a name="lua_pop"><code>lua_pop</code></a></h3><p>
+<span class="apii">[-n, +0, &ndash;]</span>
+<pre>void lua_pop (lua_State *L, int n);</pre>
+
+<p>
+Pops <code>n</code> elements from the stack.
+
+
+
+
+
+<hr><h3><a name="lua_pushboolean"><code>lua_pushboolean</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>void lua_pushboolean (lua_State *L, int b);</pre>
+
+<p>
+Pushes a boolean value with value <code>b</code> onto the stack.
+
+
+
+
+
+<hr><h3><a name="lua_pushcclosure"><code>lua_pushcclosure</code></a></h3><p>
+<span class="apii">[-n, +1, <em>m</em>]</span>
+<pre>void lua_pushcclosure (lua_State *L, lua_CFunction fn, int n);</pre>
+
+<p>
+Pushes a new C&nbsp;closure onto the stack.
+
+
+<p>
+When a C&nbsp;function is created,
+it is possible to associate some values with it,
+thus creating a C&nbsp;closure (see <a href="#4.4">&sect;4.4</a>);
+these values are then accessible to the function whenever it is called.
+To associate values with a C&nbsp;function,
+first these values must be pushed onto the stack
+(when there are multiple values, the first value is pushed first).
+Then <a href="#lua_pushcclosure"><code>lua_pushcclosure</code></a>
+is called to create and push the C&nbsp;function onto the stack,
+with the argument <code>n</code> telling how many values will be
+associated with the function.
+<a href="#lua_pushcclosure"><code>lua_pushcclosure</code></a> also pops these values from the stack.
+
+
+<p>
+The maximum value for <code>n</code> is 255.
+
+
+<p>
+When <code>n</code> is zero,
+this function creates a <em>light C&nbsp;function</em>,
+which is just a pointer to the C&nbsp;function.
+In that case, it never raises a memory error.
+
+
+
+
+
+<hr><h3><a name="lua_pushcfunction"><code>lua_pushcfunction</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>void lua_pushcfunction (lua_State *L, lua_CFunction f);</pre>
+
+<p>
+Pushes a C&nbsp;function onto the stack.
+This function receives a pointer to a C&nbsp;function
+and pushes onto the stack a Lua value of type <code>function</code> that,
+when called, invokes the corresponding C&nbsp;function.
+
+
+<p>
+Any function to be callable by Lua must
+follow the correct protocol to receive its parameters
+and return its results (see <a href="#lua_CFunction"><code>lua_CFunction</code></a>).
+
+
+
+
+
+<hr><h3><a name="lua_pushfstring"><code>lua_pushfstring</code></a></h3><p>
+<span class="apii">[-0, +1, <em>e</em>]</span>
+<pre>const char *lua_pushfstring (lua_State *L, const char *fmt, ...);</pre>
+
+<p>
+Pushes onto the stack a formatted string
+and returns a pointer to this string.
+It is similar to the ISO&nbsp;C function <code>sprintf</code>,
+but has some important differences:
+
+<ul>
+
+<li>
+You do not have to allocate space for the result:
+the result is a Lua string and Lua takes care of memory allocation
+(and deallocation, through garbage collection).
+</li>
+
+<li>
+The conversion specifiers are quite restricted.
+There are no flags, widths, or precisions.
+The conversion specifiers can only be
+'<code>%%</code>' (inserts the character '<code>%</code>'),
+'<code>%s</code>' (inserts a zero-terminated string, with no size restrictions),
+'<code>%f</code>' (inserts a <a href="#lua_Number"><code>lua_Number</code></a>),
+'<code>%I</code>' (inserts a <a href="#lua_Integer"><code>lua_Integer</code></a>),
+'<code>%p</code>' (inserts a pointer as a hexadecimal numeral),
+'<code>%d</code>' (inserts an <code>int</code>),
+'<code>%c</code>' (inserts an <code>int</code> as a one-byte character), and
+'<code>%U</code>' (inserts a <code>long int</code> as a UTF-8 byte sequence).
+</li>
+
+</ul>
+
+<p>
+Unlike other push functions,
+this function checks for the stack space it needs,
+including the slot for its result.
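+
+
+<p>
+For example:
+
+<pre>
+     const char *msg = lua_pushfstring(L, "%s:%d: bad argument", "test.lua", 10);
+     /* pushes and returns the string "test.lua:10: bad argument" */
+</pre>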
+
+
+
+
+
+<hr><h3><a name="lua_pushglobaltable"><code>lua_pushglobaltable</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>void lua_pushglobaltable (lua_State *L);</pre>
+
+<p>
+Pushes the global environment onto the stack.
+
+
+
+
+
+<hr><h3><a name="lua_pushinteger"><code>lua_pushinteger</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>void lua_pushinteger (lua_State *L, lua_Integer n);</pre>
+
+<p>
+Pushes an integer with value <code>n</code> onto the stack.
+
+
+
+
+
+<hr><h3><a name="lua_pushlightuserdata"><code>lua_pushlightuserdata</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>void lua_pushlightuserdata (lua_State *L, void *p);</pre>
+
+<p>
+Pushes a light userdata onto the stack.
+
+
+<p>
+Userdata represent C&nbsp;values in Lua.
+A <em>light userdata</em> represents a pointer, a <code>void*</code>.
+It is a value (like a number):
+you do not create it, it has no individual metatable,
+and it is not collected (as it was never created).
+A light userdata is equal to "any"
+light userdata with the same C&nbsp;address.
+
+
+
+
+
+<hr><h3><a name="lua_pushliteral"><code>lua_pushliteral</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>const char *lua_pushliteral (lua_State *L, const char *s);</pre>
+
+<p>
+This macro is equivalent to <a href="#lua_pushstring"><code>lua_pushstring</code></a>,
+but should be used only when <code>s</code> is a literal string.
+
+
+
+
+
+<hr><h3><a name="lua_pushlstring"><code>lua_pushlstring</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>const char *lua_pushlstring (lua_State *L, const char *s, size_t len);</pre>
+
+<p>
+Pushes the string pointed to by <code>s</code> with size <code>len</code>
+onto the stack.
+Lua makes (or reuses) an internal copy of the given string,
+so the memory at <code>s</code> can be freed or reused immediately after
+the function returns.
+The string can contain any binary data,
+including embedded zeros.
+
+
+<p>
+Returns a pointer to the internal copy of the string.
+
+
+
+
+
+<hr><h3><a name="lua_pushnil"><code>lua_pushnil</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>void lua_pushnil (lua_State *L);</pre>
+
+<p>
+Pushes a nil value onto the stack.
+
+
+
+
+
+<hr><h3><a name="lua_pushnumber"><code>lua_pushnumber</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>void lua_pushnumber (lua_State *L, lua_Number n);</pre>
+
+<p>
+Pushes a float with value <code>n</code> onto the stack.
+
+
+
+
+
+<hr><h3><a name="lua_pushstring"><code>lua_pushstring</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>const char *lua_pushstring (lua_State *L, const char *s);</pre>
+
+<p>
+Pushes the zero-terminated string pointed to by <code>s</code>
+onto the stack.
+Lua makes (or reuses) an internal copy of the given string,
+so the memory at <code>s</code> can be freed or reused immediately after
+the function returns.
+
+
+<p>
+Returns a pointer to the internal copy of the string.
+
+
+<p>
+If <code>s</code> is <code>NULL</code>, pushes <b>nil</b> and returns <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="lua_pushthread"><code>lua_pushthread</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>int lua_pushthread (lua_State *L);</pre>
+
+<p>
+Pushes the thread represented by <code>L</code> onto the stack.
+Returns 1 if this thread is the main thread of its state.
+
+
+
+
+
+<hr><h3><a name="lua_pushvalue"><code>lua_pushvalue</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>void lua_pushvalue (lua_State *L, int index);</pre>
+
+<p>
+Pushes a copy of the element at the given index
+onto the stack.
+
+
+
+
+
+<hr><h3><a name="lua_pushvfstring"><code>lua_pushvfstring</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>const char *lua_pushvfstring (lua_State *L,
+ const char *fmt,
+ va_list argp);</pre>
+
+<p>
+Equivalent to <a href="#lua_pushfstring"><code>lua_pushfstring</code></a>, except that it receives a <code>va_list</code>
+instead of a variable number of arguments.
+
+
+
+
+
+<hr><h3><a name="lua_rawequal"><code>lua_rawequal</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_rawequal (lua_State *L, int index1, int index2);</pre>
+
+<p>
+Returns 1 if the two values in indices <code>index1</code> and
+<code>index2</code> are primitively equal
+(that is, without calling the <code>__eq</code> metamethod).
+Otherwise returns&nbsp;0.
+Also returns&nbsp;0 if any of the indices are not valid.
+
+
+
+
+
+<hr><h3><a name="lua_rawget"><code>lua_rawget</code></a></h3><p>
+<span class="apii">[-1, +1, &ndash;]</span>
+<pre>int lua_rawget (lua_State *L, int index);</pre>
+
+<p>
+Similar to <a href="#lua_gettable"><code>lua_gettable</code></a>, but does a raw access
+(i.e., without metamethods).
+
+
+
+
+
+<hr><h3><a name="lua_rawgeti"><code>lua_rawgeti</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>int lua_rawgeti (lua_State *L, int index, lua_Integer n);</pre>
+
+<p>
+Pushes onto the stack the value <code>t[n]</code>,
+where <code>t</code> is the table at the given index.
+The access is raw,
+that is, it does not invoke the <code>__index</code> metamethod.
+
+
+<p>
+Returns the type of the pushed value.
+
+
+
+
+
+<hr><h3><a name="lua_rawgetp"><code>lua_rawgetp</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>int lua_rawgetp (lua_State *L, int index, const void *p);</pre>
+
+<p>
+Pushes onto the stack the value <code>t[k]</code>,
+where <code>t</code> is the table at the given index and
+<code>k</code> is the pointer <code>p</code> represented as a light userdata.
+The access is raw;
+that is, it does not invoke the <code>__index</code> metamethod.
+
+
+<p>
+Returns the type of the pushed value.
+
+
+
+
+
+<hr><h3><a name="lua_rawlen"><code>lua_rawlen</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>size_t lua_rawlen (lua_State *L, int index);</pre>
+
+<p>
+Returns the raw "length" of the value at the given index:
+for strings, this is the string length;
+for tables, this is the result of the length operator ('<code>#</code>')
+with no metamethods;
+for userdata, this is the size of the block of memory allocated
+for the userdata;
+for other values, it is&nbsp;0.
+
+
+
+
+
+<hr><h3><a name="lua_rawset"><code>lua_rawset</code></a></h3><p>
+<span class="apii">[-2, +0, <em>m</em>]</span>
+<pre>void lua_rawset (lua_State *L, int index);</pre>
+
+<p>
+Similar to <a href="#lua_settable"><code>lua_settable</code></a>, but does a raw assignment
+(i.e., without metamethods).
+
+
+
+
+
+<hr><h3><a name="lua_rawseti"><code>lua_rawseti</code></a></h3><p>
+<span class="apii">[-1, +0, <em>m</em>]</span>
+<pre>void lua_rawseti (lua_State *L, int index, lua_Integer i);</pre>
+
+<p>
+Does the equivalent of <code>t[i] = v</code>,
+where <code>t</code> is the table at the given index
+and <code>v</code> is the value at the top of the stack.
+
+
+<p>
+This function pops the value from the stack.
+The assignment is raw,
+that is, it does not invoke the <code>__newindex</code> metamethod.
+
+
+
+
+
+<hr><h3><a name="lua_rawsetp"><code>lua_rawsetp</code></a></h3><p>
+<span class="apii">[-1, +0, <em>m</em>]</span>
+<pre>void lua_rawsetp (lua_State *L, int index, const void *p);</pre>
+
+<p>
+Does the equivalent of <code>t[p] = v</code>,
+where <code>t</code> is the table at the given index,
+<code>p</code> is encoded as a light userdata,
+and <code>v</code> is the value at the top of the stack.
+
+
+<p>
+This function pops the value from the stack.
+The assignment is raw,
+that is, it does not invoke the <code>__newindex</code> metamethod.
+
+
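+<p>
+As an illustration, a common use of <code>lua_rawsetp</code> and <a href="#lua_rawgetp"><code>lua_rawgetp</code></a>
+is storing a value in the registry keyed by the address of a static C&nbsp;variable
+(a minimal sketch; <code>Key</code> is an arbitrary static object used only for its address):
+
+<pre>
+ static const char Key = 'k';   /* variable with a unique address */
+
+ /* in some C function, with a valid lua_State *L: */
+ lua_pushinteger(L, 42);
+ lua_rawsetp(L, LUA_REGISTRYINDEX, &amp;Key);   /* registry[&amp;Key] = 42 */
+ lua_rawgetp(L, LUA_REGISTRYINDEX, &amp;Key);   /* pushes 42 back onto the stack */
+</pre>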
+
+
+
+<hr><h3><a name="lua_Reader"><code>lua_Reader</code></a></h3>
+<pre>typedef const char * (*lua_Reader) (lua_State *L,
+ void *data,
+ size_t *size);</pre>
+
+<p>
+The reader function used by <a href="#lua_load"><code>lua_load</code></a>.
+Every time it needs another piece of the chunk,
+<a href="#lua_load"><code>lua_load</code></a> calls the reader,
+passing along its <code>data</code> parameter.
+The reader must return a pointer to a block of memory
+with a new piece of the chunk
+and set <code>size</code> to the block size.
+The block must exist until the reader function is called again.
+To signal the end of the chunk,
+the reader must return <code>NULL</code> or set <code>size</code> to zero.
+The reader function may return pieces of any size greater than zero.
+
+
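+<p>
+As a minimal sketch (assuming the usual <code>string.h</code> declarations),
+a reader that delivers a whole in-memory string as a single piece
+could look like this; the type <code>StringSource</code> and its fields are hypothetical:
+
+<pre>
+ typedef struct StringSource {
+   const char *s;    /* chunk text */
+   int done;         /* piece already delivered? */
+ } StringSource;
+
+ static const char *string_reader (lua_State *L, void *data, size_t *size) {
+   StringSource *src = (StringSource *)data;
+   (void)L;                       /* unused */
+   if (src-&gt;done) {               /* signal the end of the chunk */
+     *size = 0;
+     return NULL;
+   }
+   src-&gt;done = 1;
+   *size = strlen(src-&gt;s);
+   return src-&gt;s;                 /* block stays valid until the next call */
+ }
+</pre>
+
+<p>
+Such a reader would be passed to <a href="#lua_load"><code>lua_load</code></a>
+together with a pointer to a <code>StringSource</code> as its <code>data</code> argument.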
+
+
+
+<hr><h3><a name="lua_register"><code>lua_register</code></a></h3><p>
+<span class="apii">[-0, +0, <em>e</em>]</span>
+<pre>void lua_register (lua_State *L, const char *name, lua_CFunction f);</pre>
+
+<p>
+Sets the C&nbsp;function <code>f</code> as the new value of global <code>name</code>.
+It is defined as a macro:
+
+<pre>
+ #define lua_register(L,n,f) \
+ (lua_pushcfunction(L, f), lua_setglobal(L, n))
+</pre>
+
+
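+<p>
+For example, a short sketch that registers a C&nbsp;function as the global <code>hello</code>
+(the name <code>l_hello</code> is arbitrary):
+
+<pre>
+ static int l_hello (lua_State *L) {
+   lua_pushliteral(L, "hello from C");
+   return 1;                      /* number of results */
+ }
+
+ /* later, with a valid lua_State *L: */
+ lua_register(L, "hello", l_hello);
+</pre>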
+
+
+<hr><h3><a name="lua_remove"><code>lua_remove</code></a></h3><p>
+<span class="apii">[-1, +0, &ndash;]</span>
+<pre>void lua_remove (lua_State *L, int index);</pre>
+
+<p>
+Removes the element at the given valid index,
+shifting down the elements above this index to fill the gap.
+This function cannot be called with a pseudo-index,
+because a pseudo-index is not an actual stack position.
+
+
+
+
+
+<hr><h3><a name="lua_replace"><code>lua_replace</code></a></h3><p>
+<span class="apii">[-1, +0, &ndash;]</span>
+<pre>void lua_replace (lua_State *L, int index);</pre>
+
+<p>
+Moves the top element into the given valid index
+without shifting any element
+(therefore replacing the value at that given index),
+and then pops the top element.
+
+
+
+
+
+<hr><h3><a name="lua_resume"><code>lua_resume</code></a></h3><p>
+<span class="apii">[-?, +?, &ndash;]</span>
+<pre>int lua_resume (lua_State *L, lua_State *from, int nargs);</pre>
+
+<p>
+Starts and resumes a coroutine in the given thread <code>L</code>.
+
+
+<p>
+To start a coroutine,
+you push onto the thread stack the main function plus any arguments;
+then you call <a href="#lua_resume"><code>lua_resume</code></a>,
+with <code>nargs</code> being the number of arguments.
+This call returns when the coroutine suspends or finishes its execution.
+When it returns, the stack contains all values passed to <a href="#lua_yield"><code>lua_yield</code></a>,
+or all values returned by the body function.
+<a href="#lua_resume"><code>lua_resume</code></a> returns
+<a href="#pdf-LUA_YIELD"><code>LUA_YIELD</code></a> if the coroutine yields,
+<a href="#pdf-LUA_OK"><code>LUA_OK</code></a> if the coroutine finishes its execution
+without errors,
+or an error code in case of errors (see <a href="#lua_pcall"><code>lua_pcall</code></a>).
+
+
+<p>
+In case of errors,
+the stack is not unwound,
+so you can use the debug API over it.
+The error object is on the top of the stack.
+
+
+<p>
+To resume a coroutine,
+you remove any results from the last <a href="#lua_yield"><code>lua_yield</code></a>,
+put on its stack only the values to
+be passed as results from <code>yield</code>,
+and then call <a href="#lua_resume"><code>lua_resume</code></a>.
+
+
+<p>
+The parameter <code>from</code> represents the coroutine that is resuming <code>L</code>.
+If there is no such coroutine,
+this parameter can be <code>NULL</code>.
+
+
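+<p>
+A minimal sketch of driving a coroutine from C,
+assuming a global Lua function <code>producer</code> as the coroutine body
+and the usual <code>stdio.h</code> declarations:
+
+<pre>
+ lua_State *co = lua_newthread(L);
+ lua_getglobal(co, "producer");          /* body function */
+ int status = lua_resume(co, L, 0);      /* start with no arguments */
+ while (status == LUA_YIELD) {
+   /* yielded values are now on 'co'; use them, then discard them */
+   lua_pop(co, lua_gettop(co));
+   status = lua_resume(co, L, 0);        /* resume with no values */
+ }
+ if (status != LUA_OK)
+   printf("error: %s\n", lua_tostring(co, -1));
+</pre>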
+
+
+
+<hr><h3><a name="lua_rotate"><code>lua_rotate</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void lua_rotate (lua_State *L, int idx, int n);</pre>
+
+<p>
+Rotates the stack elements between the valid index <code>idx</code>
+and the top of the stack.
+The elements are rotated <code>n</code> positions in the direction of the top,
+for a positive <code>n</code>,
+or <code>-n</code> positions in the direction of the bottom,
+for a negative <code>n</code>.
+The absolute value of <code>n</code> must not be greater than the size
+of the slice being rotated.
+This function cannot be called with a pseudo-index,
+because a pseudo-index is not an actual stack position.
+
+
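+<p>
+For instance, with the stack holding (from bottom to top) <code>10 20 30 40</code>:
+
+<pre>
+ lua_rotate(L, 2, 1);    /* stack becomes 10 40 20 30 (old top moved to index 2) */
+ lua_rotate(L, 2, -1);   /* stack is 10 20 30 40 again */
+</pre>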
+
+
+
+<hr><h3><a name="lua_setallocf"><code>lua_setallocf</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void lua_setallocf (lua_State *L, lua_Alloc f, void *ud);</pre>
+
+<p>
+Changes the allocator function of a given state to <code>f</code>
+with user data <code>ud</code>.
+
+
+
+
+
+<hr><h3><a name="lua_setfield"><code>lua_setfield</code></a></h3><p>
+<span class="apii">[-1, +0, <em>e</em>]</span>
+<pre>void lua_setfield (lua_State *L, int index, const char *k);</pre>
+
+<p>
+Does the equivalent to <code>t[k] = v</code>,
+where <code>t</code> is the value at the given index
+and <code>v</code> is the value at the top of the stack.
+
+
+<p>
+This function pops the value from the stack.
+As in Lua, this function may trigger a metamethod
+for the "newindex" event (see <a href="#2.4">&sect;2.4</a>).
+
+
+
+
+
+<hr><h3><a name="lua_setglobal"><code>lua_setglobal</code></a></h3><p>
+<span class="apii">[-1, +0, <em>e</em>]</span>
+<pre>void lua_setglobal (lua_State *L, const char *name);</pre>
+
+<p>
+Pops a value from the stack and
+sets it as the new value of global <code>name</code>.
+
+
+
+
+
+<hr><h3><a name="lua_seti"><code>lua_seti</code></a></h3><p>
+<span class="apii">[-1, +0, <em>e</em>]</span>
+<pre>void lua_seti (lua_State *L, int index, lua_Integer n);</pre>
+
+<p>
+Does the equivalent to <code>t[n] = v</code>,
+where <code>t</code> is the value at the given index
+and <code>v</code> is the value at the top of the stack.
+
+
+<p>
+This function pops the value from the stack.
+As in Lua, this function may trigger a metamethod
+for the "newindex" event (see <a href="#2.4">&sect;2.4</a>).
+
+
+
+
+
+<hr><h3><a name="lua_setmetatable"><code>lua_setmetatable</code></a></h3><p>
+<span class="apii">[-1, +0, &ndash;]</span>
+<pre>void lua_setmetatable (lua_State *L, int index);</pre>
+
+<p>
+Pops a table from the stack and
+sets it as the new metatable for the value at the given index.
+
+
+
+
+
+<hr><h3><a name="lua_settable"><code>lua_settable</code></a></h3><p>
+<span class="apii">[-2, +0, <em>e</em>]</span>
+<pre>void lua_settable (lua_State *L, int index);</pre>
+
+<p>
+Does the equivalent to <code>t[k] = v</code>,
+where <code>t</code> is the value at the given index,
+<code>v</code> is the value at the top of the stack,
+and <code>k</code> is the value just below the top.
+
+
+<p>
+This function pops both the key and the value from the stack.
+As in Lua, this function may trigger a metamethod
+for the "newindex" event (see <a href="#2.4">&sect;2.4</a>).
+
+
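+<p>
+For instance, assuming a table on the top of the stack,
+the following sketch sets its field <code>"x"</code> to 10:
+
+<pre>
+ lua_pushstring(L, "x");       /* key */
+ lua_pushinteger(L, 10);       /* value */
+ lua_settable(L, -3);          /* the table is now below the key and the value */
+</pre>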
+
+
+
+<hr><h3><a name="lua_settop"><code>lua_settop</code></a></h3><p>
+<span class="apii">[-?, +?, &ndash;]</span>
+<pre>void lua_settop (lua_State *L, int index);</pre>
+
+<p>
+Accepts any index, or&nbsp;0,
+and sets the stack top to this index.
+If the new top is larger than the old one,
+then the new elements are filled with <b>nil</b>.
+If <code>index</code> is&nbsp;0, then all stack elements are removed.
+
+
+
+
+
+<hr><h3><a name="lua_setuservalue"><code>lua_setuservalue</code></a></h3><p>
+<span class="apii">[-1, +0, &ndash;]</span>
+<pre>void lua_setuservalue (lua_State *L, int index);</pre>
+
+<p>
+Pops a value from the stack and sets it as
+the new value associated to the full userdata at the given index.
+
+
+
+
+
+<hr><h3><a name="lua_State"><code>lua_State</code></a></h3>
+<pre>typedef struct lua_State lua_State;</pre>
+
+<p>
+An opaque structure that points to a thread and indirectly
+(through the thread) to the whole state of a Lua interpreter.
+The Lua library is fully reentrant:
+it has no global variables.
+All information about a state is accessible through this structure.
+
+
+<p>
+A pointer to this structure must be passed as the first argument to
+every function in the library, except to <a href="#lua_newstate"><code>lua_newstate</code></a>,
+which creates a Lua state from scratch.
+
+
+
+
+
+<hr><h3><a name="lua_status"><code>lua_status</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_status (lua_State *L);</pre>
+
+<p>
+Returns the status of the thread <code>L</code>.
+
+
+<p>
+The status can be 0 (<a href="#pdf-LUA_OK"><code>LUA_OK</code></a>) for a normal thread,
+an error code if the thread finished the execution
+of a <a href="#lua_resume"><code>lua_resume</code></a> with an error,
+or <a name="pdf-LUA_YIELD"><code>LUA_YIELD</code></a> if the thread is suspended.
+
+
+<p>
+You can only call functions in threads with status <a href="#pdf-LUA_OK"><code>LUA_OK</code></a>.
+You can resume threads with status <a href="#pdf-LUA_OK"><code>LUA_OK</code></a>
+(to start a new coroutine) or <a href="#pdf-LUA_YIELD"><code>LUA_YIELD</code></a>
+(to resume a coroutine).
+
+
+
+
+
+<hr><h3><a name="lua_stringtonumber"><code>lua_stringtonumber</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>size_t lua_stringtonumber (lua_State *L, const char *s);</pre>
+
+<p>
+Converts the zero-terminated string <code>s</code> to a number,
+pushes that number into the stack,
+and returns the total size of the string,
+that is, its length plus one.
+The conversion can result in an integer or a float,
+according to the lexical conventions of Lua (see <a href="#3.1">&sect;3.1</a>).
+The string may have leading and trailing spaces and a sign.
+If the string is not a valid numeral,
+returns 0 and pushes nothing.
+(Note that the result can be used as a boolean,
+true if the conversion succeeds.)
+
+
+
+
+
+<hr><h3><a name="lua_toboolean"><code>lua_toboolean</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_toboolean (lua_State *L, int index);</pre>
+
+<p>
+Converts the Lua value at the given index to a C&nbsp;boolean
+value (0&nbsp;or&nbsp;1).
+Like all tests in Lua,
+<a href="#lua_toboolean"><code>lua_toboolean</code></a> returns true for any Lua value
+different from <b>false</b> and <b>nil</b>;
+otherwise it returns false.
+(If you want to accept only actual boolean values,
+use <a href="#lua_isboolean"><code>lua_isboolean</code></a> to test the value's type.)
+
+
+
+
+
+<hr><h3><a name="lua_tocfunction"><code>lua_tocfunction</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_CFunction lua_tocfunction (lua_State *L, int index);</pre>
+
+<p>
+Converts a value at the given index to a C&nbsp;function.
+That value must be a C&nbsp;function;
+otherwise, returns <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="lua_tointeger"><code>lua_tointeger</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_Integer lua_tointeger (lua_State *L, int index);</pre>
+
+<p>
+Equivalent to <a href="#lua_tointegerx"><code>lua_tointegerx</code></a> with <code>isnum</code> equal to <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="lua_tointegerx"><code>lua_tointegerx</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_Integer lua_tointegerx (lua_State *L, int index, int *isnum);</pre>
+
+<p>
+Converts the Lua value at the given index
+to the signed integral type <a href="#lua_Integer"><code>lua_Integer</code></a>.
+The Lua value must be an integer,
+or a number or string convertible to an integer (see <a href="#3.4.3">&sect;3.4.3</a>);
+otherwise, <code>lua_tointegerx</code> returns&nbsp;0.
+
+
+<p>
+If <code>isnum</code> is not <code>NULL</code>,
+its referent is assigned a boolean value that
+indicates whether the operation succeeded.
+
+
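+<p>
+A typical usage sketch inside a C&nbsp;function:
+
+<pre>
+ int isnum;
+ lua_Integer n = lua_tointegerx(L, 1, &amp;isnum);
+ if (!isnum)
+   return luaL_argerror(L, 1, "integer expected");
+</pre>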
+
+
+
+<hr><h3><a name="lua_tolstring"><code>lua_tolstring</code></a></h3><p>
+<span class="apii">[-0, +0, <em>m</em>]</span>
+<pre>const char *lua_tolstring (lua_State *L, int index, size_t *len);</pre>
+
+<p>
+Converts the Lua value at the given index to a C&nbsp;string.
+If <code>len</code> is not <code>NULL</code>,
+it sets <code>*len</code> with the string length.
+The Lua value must be a string or a number;
+otherwise, the function returns <code>NULL</code>.
+If the value is a number,
+then <code>lua_tolstring</code> also
+<em>changes the actual value in the stack to a string</em>.
+(This change confuses <a href="#lua_next"><code>lua_next</code></a>
+when <code>lua_tolstring</code> is applied to keys during a table traversal.)
+
+
+<p>
+<code>lua_tolstring</code> returns a pointer
+to a string inside the Lua state.
+This string always has a zero ('<code>\0</code>')
+after its last character (as in&nbsp;C),
+but can contain other zeros in its body.
+
+
+<p>
+Because Lua has garbage collection,
+there is no guarantee that the pointer returned by <code>lua_tolstring</code>
+will be valid after the corresponding Lua value is removed from the stack.
+
+
+
+
+
+<hr><h3><a name="lua_tonumber"><code>lua_tonumber</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_Number lua_tonumber (lua_State *L, int index);</pre>
+
+<p>
+Equivalent to <a href="#lua_tonumberx"><code>lua_tonumberx</code></a> with <code>isnum</code> equal to <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="lua_tonumberx"><code>lua_tonumberx</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_Number lua_tonumberx (lua_State *L, int index, int *isnum);</pre>
+
+<p>
+Converts the Lua value at the given index
+to the C&nbsp;type <a href="#lua_Number"><code>lua_Number</code></a>.
+The Lua value must be a number or a string convertible to a number
+(see <a href="#3.4.3">&sect;3.4.3</a>);
+otherwise, <a href="#lua_tonumberx"><code>lua_tonumberx</code></a> returns&nbsp;0.
+
+
+<p>
+If <code>isnum</code> is not <code>NULL</code>,
+its referent is assigned a boolean value that
+indicates whether the operation succeeded.
+
+
+
+
+
+<hr><h3><a name="lua_topointer"><code>lua_topointer</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>const void *lua_topointer (lua_State *L, int index);</pre>
+
+<p>
+Converts the value at the given index to a generic
+C&nbsp;pointer (<code>void*</code>).
+The value can be a userdata, a table, a thread, or a function;
+otherwise, <code>lua_topointer</code> returns <code>NULL</code>.
+Different objects will give different pointers.
+There is no way to convert the pointer back to its original value.
+
+
+<p>
+Typically this function is used only for hashing and debug information.
+
+
+
+
+
+<hr><h3><a name="lua_tostring"><code>lua_tostring</code></a></h3><p>
+<span class="apii">[-0, +0, <em>m</em>]</span>
+<pre>const char *lua_tostring (lua_State *L, int index);</pre>
+
+<p>
+Equivalent to <a href="#lua_tolstring"><code>lua_tolstring</code></a> with <code>len</code> equal to <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="lua_tothread"><code>lua_tothread</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_State *lua_tothread (lua_State *L, int index);</pre>
+
+<p>
+Converts the value at the given index to a Lua thread
+(represented as <code>lua_State*</code>).
+This value must be a thread;
+otherwise, the function returns <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="lua_touserdata"><code>lua_touserdata</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void *lua_touserdata (lua_State *L, int index);</pre>
+
+<p>
+If the value at the given index is a full userdata,
+returns its block address.
+If the value is a light userdata,
+returns its pointer.
+Otherwise, returns <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="lua_type"><code>lua_type</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_type (lua_State *L, int index);</pre>
+
+<p>
+Returns the type of the value in the given valid index,
+or <code>LUA_TNONE</code> for a non-valid (but acceptable) index.
+The types returned by <a href="#lua_type"><code>lua_type</code></a> are coded by the following constants
+defined in <code>lua.h</code>:
+<a name="pdf-LUA_TNIL"><code>LUA_TNIL</code></a> (0),
+<a name="pdf-LUA_TNUMBER"><code>LUA_TNUMBER</code></a>,
+<a name="pdf-LUA_TBOOLEAN"><code>LUA_TBOOLEAN</code></a>,
+<a name="pdf-LUA_TSTRING"><code>LUA_TSTRING</code></a>,
+<a name="pdf-LUA_TTABLE"><code>LUA_TTABLE</code></a>,
+<a name="pdf-LUA_TFUNCTION"><code>LUA_TFUNCTION</code></a>,
+<a name="pdf-LUA_TUSERDATA"><code>LUA_TUSERDATA</code></a>,
+<a name="pdf-LUA_TTHREAD"><code>LUA_TTHREAD</code></a>,
+and
+<a name="pdf-LUA_TLIGHTUSERDATA"><code>LUA_TLIGHTUSERDATA</code></a>.
+
+
+
+
+
+<hr><h3><a name="lua_typename"><code>lua_typename</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>const char *lua_typename (lua_State *L, int tp);</pre>
+
+<p>
+Returns the name of the type encoded by the value <code>tp</code>,
+which must be one of the values returned by <a href="#lua_type"><code>lua_type</code></a>.
+
+
+
+
+
+<hr><h3><a name="lua_Unsigned"><code>lua_Unsigned</code></a></h3>
+<pre>typedef ... lua_Unsigned;</pre>
+
+<p>
+The unsigned version of <a href="#lua_Integer"><code>lua_Integer</code></a>.
+
+
+
+
+
+<hr><h3><a name="lua_upvalueindex"><code>lua_upvalueindex</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_upvalueindex (int i);</pre>
+
+<p>
+Returns the pseudo-index that represents the <code>i</code>-th upvalue of
+the running function (see <a href="#4.4">&sect;4.4</a>).
+
+
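+<p>
+For example, a sketch of a C&nbsp;closure that keeps a counter in its first upvalue
+(the name <code>l_counter</code> is arbitrary):
+
+<pre>
+ static int l_counter (lua_State *L) {
+   lua_Integer n = lua_tointeger(L, lua_upvalueindex(1)) + 1;
+   lua_pushinteger(L, n);
+   lua_pushvalue(L, -1);                  /* duplicate the new value */
+   lua_replace(L, lua_upvalueindex(1));   /* update the upvalue */
+   return 1;                              /* return the new value */
+ }
+
+ /* creation: push the initial value, then close over it */
+ lua_pushinteger(L, 0);
+ lua_pushcclosure(L, l_counter, 1);
+</pre>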
+
+
+
+<hr><h3><a name="lua_version"><code>lua_version</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>const lua_Number *lua_version (lua_State *L);</pre>
+
+<p>
+Returns the address of the version number
+(a C static variable)
+stored in the Lua core.
+When called with a valid <a href="#lua_State"><code>lua_State</code></a>,
+returns the address of the version used to create that state.
+When called with <code>NULL</code>,
+returns the address of the version running the call.
+
+
+
+
+
+<hr><h3><a name="lua_Writer"><code>lua_Writer</code></a></h3>
+<pre>typedef int (*lua_Writer) (lua_State *L,
+ const void* p,
+ size_t sz,
+ void* ud);</pre>
+
+<p>
+The type of the writer function used by <a href="#lua_dump"><code>lua_dump</code></a>.
+Every time it produces another piece of chunk,
+<a href="#lua_dump"><code>lua_dump</code></a> calls the writer,
+passing along the buffer to be written (<code>p</code>),
+its size (<code>sz</code>),
+and the <code>data</code> parameter supplied to <a href="#lua_dump"><code>lua_dump</code></a>.
+
+
+<p>
+The writer returns an error code:
+0&nbsp;means no errors;
+any other value means an error and stops <a href="#lua_dump"><code>lua_dump</code></a> from
+calling the writer again.
+
+
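+<p>
+A minimal sketch of a writer that appends each piece to a <code>FILE*</code>
+passed as the user data (assuming the usual <code>stdio.h</code> declarations):
+
+<pre>
+ static int file_writer (lua_State *L, const void *p, size_t sz, void *ud) {
+   FILE *f = (FILE *)ud;
+   (void)L;                               /* unused */
+   return (fwrite(p, 1, sz, f) != sz);    /* non-zero stops lua_dump */
+ }
+</pre>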
+
+
+
+<hr><h3><a name="lua_xmove"><code>lua_xmove</code></a></h3><p>
+<span class="apii">[-?, +?, &ndash;]</span>
+<pre>void lua_xmove (lua_State *from, lua_State *to, int n);</pre>
+
+<p>
+Exchanges values between different threads of the same state.
+
+
+<p>
+This function pops <code>n</code> values from the stack <code>from</code>,
+and pushes them onto the stack <code>to</code>.
+
+
+
+
+
+<hr><h3><a name="lua_yield"><code>lua_yield</code></a></h3><p>
+<span class="apii">[-?, +?, <em>e</em>]</span>
+<pre>int lua_yield (lua_State *L, int nresults);</pre>
+
+<p>
+This function is equivalent to <a href="#lua_yieldk"><code>lua_yieldk</code></a>,
+but it has no continuation (see <a href="#4.7">&sect;4.7</a>).
+Therefore, when the thread resumes,
+it continues the function that called
+the function calling <code>lua_yield</code>.
+
+
+
+
+
+<hr><h3><a name="lua_yieldk"><code>lua_yieldk</code></a></h3><p>
+<span class="apii">[-?, +?, <em>e</em>]</span>
+<pre>int lua_yieldk (lua_State *L,
+ int nresults,
+ lua_KContext ctx,
+ lua_KFunction k);</pre>
+
+<p>
+Yields a coroutine (thread).
+
+
+<p>
+When a C&nbsp;function calls <a href="#lua_yieldk"><code>lua_yieldk</code></a>,
+the running coroutine suspends its execution,
+and the call to <a href="#lua_resume"><code>lua_resume</code></a> that started this coroutine returns.
+The parameter <code>nresults</code> is the number of values from the stack
+that will be passed as results to <a href="#lua_resume"><code>lua_resume</code></a>.
+
+
+<p>
+When the coroutine is resumed again,
+Lua calls the given continuation function <code>k</code> to continue
+the execution of the C&nbsp;function that yielded (see <a href="#4.7">&sect;4.7</a>).
+This continuation function receives the same stack
+from the previous function,
+with the <code>nresults</code> results removed and
+replaced by the arguments passed to <a href="#lua_resume"><code>lua_resume</code></a>.
+Moreover,
+the continuation function receives the value <code>ctx</code>
+that was passed to <a href="#lua_yieldk"><code>lua_yieldk</code></a>.
+
+
+<p>
+Usually, this function does not return;
+when the coroutine eventually resumes,
+it continues executing the continuation function.
+However, there is one special case,
+which is when this function is called
+from inside a line or a count hook (see <a href="#4.9">&sect;4.9</a>).
+In that case, <code>lua_yieldk</code> should be called with no continuation
+(probably in the form of <a href="#lua_yield"><code>lua_yield</code></a>) and no results,
+and the hook should return immediately after the call.
+Lua will yield and,
+when the coroutine resumes again,
+it will continue the normal execution
+of the (Lua) function that triggered the hook.
+
+
+<p>
+This function can raise an error if it is called from a thread
+with a pending C call with no continuation function,
+or it is called from a thread that is not running inside a resume
+(e.g., the main thread).
+
+
+
+
+
+
+
+<h2>4.9 &ndash; <a name="4.9">The Debug Interface</a></h2>
+
+<p>
+Lua has no built-in debugging facilities.
+Instead, it offers a special interface
+by means of functions and <em>hooks</em>.
+This interface allows the construction of different
+kinds of debuggers, profilers, and other tools
+that need "inside information" from the interpreter.
+
+
+
+<hr><h3><a name="lua_Debug"><code>lua_Debug</code></a></h3>
+<pre>typedef struct lua_Debug {
+ int event;
+ const char *name; /* (n) */
+ const char *namewhat; /* (n) */
+ const char *what; /* (S) */
+ const char *source; /* (S) */
+ int currentline; /* (l) */
+ int linedefined; /* (S) */
+ int lastlinedefined; /* (S) */
+ unsigned char nups; /* (u) number of upvalues */
+ unsigned char nparams; /* (u) number of parameters */
+ char isvararg; /* (u) */
+ char istailcall; /* (t) */
+ char short_src[LUA_IDSIZE]; /* (S) */
+ /* private part */
+ <em>other fields</em>
+} lua_Debug;</pre>
+
+<p>
+A structure used to carry different pieces of
+information about a function or an activation record.
+<a href="#lua_getstack"><code>lua_getstack</code></a> fills only the private part
+of this structure, for later use.
+To fill the other fields of <a href="#lua_Debug"><code>lua_Debug</code></a> with useful information,
+call <a href="#lua_getinfo"><code>lua_getinfo</code></a>.
+
+
+<p>
+The fields of <a href="#lua_Debug"><code>lua_Debug</code></a> have the following meaning:
+
+<ul>
+
+<li><b><code>source</code>: </b>
+the name of the chunk that created the function.
+If <code>source</code> starts with a '<code>@</code>',
+it means that the function was defined in a file where
+the file name follows the '<code>@</code>'.
+If <code>source</code> starts with a '<code>=</code>',
+the remainder of its contents describe the source in a user-dependent manner.
+Otherwise,
+the function was defined in a string where
+<code>source</code> is that string.
+</li>
+
+<li><b><code>short_src</code>: </b>
+a "printable" version of <code>source</code>, to be used in error messages.
+</li>
+
+<li><b><code>linedefined</code>: </b>
+the line number where the definition of the function starts.
+</li>
+
+<li><b><code>lastlinedefined</code>: </b>
+the line number where the definition of the function ends.
+</li>
+
+<li><b><code>what</code>: </b>
+the string <code>"Lua"</code> if the function is a Lua function,
+<code>"C"</code> if it is a C&nbsp;function,
+<code>"main"</code> if it is the main part of a chunk.
+</li>
+
+<li><b><code>currentline</code>: </b>
+the current line where the given function is executing.
+When no line information is available,
+<code>currentline</code> is set to -1.
+</li>
+
+<li><b><code>name</code>: </b>
+a reasonable name for the given function.
+Because functions in Lua are first-class values,
+they do not have a fixed name:
+some functions can be the value of multiple global variables,
+while others can be stored only in a table field.
+The <code>lua_getinfo</code> function checks how the function was
+called to find a suitable name.
+If it cannot find a name,
+then <code>name</code> is set to <code>NULL</code>.
+</li>
+
+<li><b><code>namewhat</code>: </b>
+explains the <code>name</code> field.
+The value of <code>namewhat</code> can be
+<code>"global"</code>, <code>"local"</code>, <code>"method"</code>,
+<code>"field"</code>, <code>"upvalue"</code>, or <code>""</code> (the empty string),
+according to how the function was called.
+(Lua uses the empty string when no other option seems to apply.)
+</li>
+
+<li><b><code>istailcall</code>: </b>
+true if this function invocation was called by a tail call.
+In this case, the caller of this level is not in the stack.
+</li>
+
+<li><b><code>nups</code>: </b>
+the number of upvalues of the function.
+</li>
+
+<li><b><code>nparams</code>: </b>
+the number of fixed parameters of the function
+(always 0&nbsp;for C&nbsp;functions).
+</li>
+
+<li><b><code>isvararg</code>: </b>
+true if the function is a vararg function
+(always true for C&nbsp;functions).
+</li>
+
+</ul>
+
+
+
+
+<hr><h3><a name="lua_gethook"><code>lua_gethook</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_Hook lua_gethook (lua_State *L);</pre>
+
+<p>
+Returns the current hook function.
+
+
+
+
+
+<hr><h3><a name="lua_gethookcount"><code>lua_gethookcount</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_gethookcount (lua_State *L);</pre>
+
+<p>
+Returns the current hook count.
+
+
+
+
+
+<hr><h3><a name="lua_gethookmask"><code>lua_gethookmask</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_gethookmask (lua_State *L);</pre>
+
+<p>
+Returns the current hook mask.
+
+
+
+
+
+<hr><h3><a name="lua_getinfo"><code>lua_getinfo</code></a></h3><p>
+<span class="apii">[-(0|1), +(0|1|2), <em>e</em>]</span>
+<pre>int lua_getinfo (lua_State *L, const char *what, lua_Debug *ar);</pre>
+
+<p>
+Gets information about a specific function or function invocation.
+
+
+<p>
+To get information about a function invocation,
+the parameter <code>ar</code> must be a valid activation record that was
+filled by a previous call to <a href="#lua_getstack"><code>lua_getstack</code></a> or
+given as argument to a hook (see <a href="#lua_Hook"><code>lua_Hook</code></a>).
+
+
+<p>
+To get information about a function you push it onto the stack
+and start the <code>what</code> string with the character '<code>&gt;</code>'.
+(In that case,
+<code>lua_getinfo</code> pops the function from the top of the stack.)
+For instance, to know in which line a function <code>f</code> was defined,
+you can write the following code:
+
+<pre>
+ lua_Debug ar;
+ lua_getglobal(L, "f"); /* get global 'f' */
+ lua_getinfo(L, "&gt;S", &amp;ar);
+ printf("%d\n", ar.linedefined);
+</pre>
+
+<p>
+Each character in the string <code>what</code>
+selects some fields of the structure <code>ar</code> to be filled or
+a value to be pushed on the stack:
+
+<ul>
+
+<li><b>'<code>n</code>': </b> fills in the fields <code>name</code> and <code>namewhat</code>;
+</li>
+
+<li><b>'<code>S</code>': </b>
+fills in the fields <code>source</code>, <code>short_src</code>,
+<code>linedefined</code>, <code>lastlinedefined</code>, and <code>what</code>;
+</li>
+
+<li><b>'<code>l</code>': </b> fills in the field <code>currentline</code>;
+</li>
+
+<li><b>'<code>t</code>': </b> fills in the field <code>istailcall</code>;
+</li>
+
+<li><b>'<code>u</code>': </b> fills in the fields
+<code>nups</code>, <code>nparams</code>, and <code>isvararg</code>;
+</li>
+
+<li><b>'<code>f</code>': </b>
+pushes onto the stack the function that is
+running at the given level;
+</li>
+
+<li><b>'<code>L</code>': </b>
+pushes onto the stack a table whose indices are the
+numbers of the lines that are valid in the function.
+(A <em>valid line</em> is a line with some associated code,
+that is, a line where you can put a break point.
+Non-valid lines include empty lines and comments.)
+
+
+<p>
+If this option is given together with option '<code>f</code>',
+its table is pushed after the function.
+</li>
+
+</ul>
+
+<p>
+This function returns 0 on error
+(for instance, an invalid option in <code>what</code>).
+
+
+
+
+
+<hr><h3><a name="lua_getlocal"><code>lua_getlocal</code></a></h3><p>
+<span class="apii">[-0, +(0|1), &ndash;]</span>
+<pre>const char *lua_getlocal (lua_State *L, const lua_Debug *ar, int n);</pre>
+
+<p>
+Gets information about a local variable of
+a given activation record or a given function.
+
+
+<p>
+In the first case,
+the parameter <code>ar</code> must be a valid activation record that was
+filled by a previous call to <a href="#lua_getstack"><code>lua_getstack</code></a> or
+given as argument to a hook (see <a href="#lua_Hook"><code>lua_Hook</code></a>).
+The index <code>n</code> selects which local variable to inspect;
+see <a href="#pdf-debug.getlocal"><code>debug.getlocal</code></a> for details about variable indices
+and names.
+
+
+<p>
+<a href="#lua_getlocal"><code>lua_getlocal</code></a> pushes the variable's value onto the stack
+and returns its name.
+
+
+<p>
+In the second case, <code>ar</code> must be <code>NULL</code> and the function
+to be inspected must be at the top of the stack.
+In this case, only parameters of Lua functions are visible
+(as there is no information about what variables are active)
+and no values are pushed onto the stack.
+
+
+<p>
+Returns <code>NULL</code> (and pushes nothing)
+when the index is greater than
+the number of active local variables.
+
+
+
+
+
+<hr><h3><a name="lua_getstack"><code>lua_getstack</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>int lua_getstack (lua_State *L, int level, lua_Debug *ar);</pre>
+
+<p>
+Gets information about the interpreter runtime stack.
+
+
+<p>
+This function fills parts of a <a href="#lua_Debug"><code>lua_Debug</code></a> structure with
+an identification of the <em>activation record</em>
+of the function executing at a given level.
+Level&nbsp;0 is the current running function,
+whereas level <em>n+1</em> is the function that has called level <em>n</em>
+(except for tail calls, which do not count on the stack).
+When there are no errors, <a href="#lua_getstack"><code>lua_getstack</code></a> returns 1;
+when called with a level greater than the stack depth,
+it returns 0.
+
+
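+<p>
+For example, a sketch that reports where the current C&nbsp;function was called from
+(assuming the usual <code>stdio.h</code> declarations):
+
+<pre>
+ lua_Debug ar;
+ if (lua_getstack(L, 1, &amp;ar)) {        /* level 1 = the caller */
+   lua_getinfo(L, "Sl", &amp;ar);
+   printf("called from %s:%d\n", ar.short_src, ar.currentline);
+ }
+</pre>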
+
+
+
+<hr><h3><a name="lua_getupvalue"><code>lua_getupvalue</code></a></h3><p>
+<span class="apii">[-0, +(0|1), &ndash;]</span>
+<pre>const char *lua_getupvalue (lua_State *L, int funcindex, int n);</pre>
+
+<p>
+Gets information about the <code>n</code>-th upvalue
+of the closure at index <code>funcindex</code>.
+It pushes the upvalue's value onto the stack
+and returns its name.
+Returns <code>NULL</code> (and pushes nothing)
+when the index <code>n</code> is greater than the number of upvalues.
+
+
+<p>
+For C&nbsp;functions, this function uses the empty string <code>""</code>
+as a name for all upvalues.
+(For Lua functions,
+upvalues are the external local variables that the function uses,
+and that are consequently included in its closure.)
+
+
+<p>
+Upvalues have no particular order,
+as they are active through the whole function.
+They are numbered in an arbitrary order.
+
+
+
+
+
+<hr><h3><a name="lua_Hook"><code>lua_Hook</code></a></h3>
+<pre>typedef void (*lua_Hook) (lua_State *L, lua_Debug *ar);</pre>
+
+<p>
+Type for debugging hook functions.
+
+
+<p>
+Whenever a hook is called, its <code>ar</code> argument has its field
+<code>event</code> set to the specific event that triggered the hook.
+Lua identifies these events with the following constants:
+<a name="pdf-LUA_HOOKCALL"><code>LUA_HOOKCALL</code></a>, <a name="pdf-LUA_HOOKRET"><code>LUA_HOOKRET</code></a>,
+<a name="pdf-LUA_HOOKTAILCALL"><code>LUA_HOOKTAILCALL</code></a>, <a name="pdf-LUA_HOOKLINE"><code>LUA_HOOKLINE</code></a>,
+and <a name="pdf-LUA_HOOKCOUNT"><code>LUA_HOOKCOUNT</code></a>.
+Moreover, for line events, the field <code>currentline</code> is also set.
+To get the value of any other field in <code>ar</code>,
+the hook must call <a href="#lua_getinfo"><code>lua_getinfo</code></a>.
+
+
+<p>
+For call events, <code>event</code> can be <code>LUA_HOOKCALL</code>,
+the normal value, or <code>LUA_HOOKTAILCALL</code>, for a tail call;
+in this case, there will be no corresponding return event.
+
+
+<p>
+While Lua is running a hook, it disables other calls to hooks.
+Therefore, if a hook calls back Lua to execute a function or a chunk,
+this execution occurs without any calls to hooks.
+
+
+<p>
+Hook functions cannot have continuations,
+that is, they cannot call <a href="#lua_yieldk"><code>lua_yieldk</code></a>,
+<a href="#lua_pcallk"><code>lua_pcallk</code></a>, or <a href="#lua_callk"><code>lua_callk</code></a> with a non-null <code>k</code>.
+
+
+<p>
+Hook functions can yield under the following conditions:
+Only count and line events can yield;
+to yield, a hook function must finish its execution
+calling <a href="#lua_yield"><code>lua_yield</code></a> with <code>nresults</code> equal to zero
+(that is, with no values).
+
+
+
+
+
+<hr><h3><a name="lua_sethook"><code>lua_sethook</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void lua_sethook (lua_State *L, lua_Hook f, int mask, int count);</pre>
+
+<p>
+Sets the debugging hook function.
+
+
+<p>
+Argument <code>f</code> is the hook function.
+<code>mask</code> specifies on which events the hook will be called:
+it is formed by a bitwise OR of the constants
+<a name="pdf-LUA_MASKCALL"><code>LUA_MASKCALL</code></a>,
+<a name="pdf-LUA_MASKRET"><code>LUA_MASKRET</code></a>,
+<a name="pdf-LUA_MASKLINE"><code>LUA_MASKLINE</code></a>,
+and <a name="pdf-LUA_MASKCOUNT"><code>LUA_MASKCOUNT</code></a>.
+The <code>count</code> argument is only meaningful when the mask
+includes <code>LUA_MASKCOUNT</code>.
+For each event, the hook is called as explained below:
+
+<ul>
+
+<li><b>The call hook: </b> is called when the interpreter calls a function.
+The hook is called just after Lua enters the new function,
+before the function gets its arguments.
+</li>
+
+<li><b>The return hook: </b> is called when the interpreter returns from a function.
+The hook is called just before Lua leaves the function.
+There is no standard way to access the values
+to be returned by the function.
+</li>
+
+<li><b>The line hook: </b> is called when the interpreter is about to
+start the execution of a new line of code,
+or when it jumps back in the code (even to the same line).
+(This event only happens while Lua is executing a Lua function.)
+</li>
+
+<li><b>The count hook: </b> is called after the interpreter executes every
+<code>count</code> instructions.
+(This event only happens while Lua is executing a Lua function.)
+</li>
+
+</ul>
+
+<p>
+A hook is disabled by setting <code>mask</code> to zero.
+
+
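+<p>
+For instance, a sketch that prints every line executed by Lua code
+(assuming the usual <code>stdio.h</code> declarations):
+
+<pre>
+ static void line_hook (lua_State *L, lua_Debug *ar) {
+   (void)L;
+   printf("executing line %d\n", ar-&gt;currentline);  /* set for line events */
+ }
+
+ /* install it: */
+ lua_sethook(L, line_hook, LUA_MASKLINE, 0);
+</pre>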
+
+
+
+<hr><h3><a name="lua_setlocal"><code>lua_setlocal</code></a></h3><p>
+<span class="apii">[-(0|1), +0, &ndash;]</span>
+<pre>const char *lua_setlocal (lua_State *L, const lua_Debug *ar, int n);</pre>
+
+<p>
+Sets the value of a local variable of a given activation record.
+It assigns the value at the top of the stack
+to the variable and returns its name.
+It also pops the value from the stack.
+
+
+<p>
+Returns <code>NULL</code> (and pops nothing)
+when the index is greater than
+the number of active local variables.
+
+
+<p>
+Parameters <code>ar</code> and <code>n</code> are as in function <a href="#lua_getlocal"><code>lua_getlocal</code></a>.
+
+
+
+
+
+<hr><h3><a name="lua_setupvalue"><code>lua_setupvalue</code></a></h3><p>
+<span class="apii">[-(0|1), +0, &ndash;]</span>
+<pre>const char *lua_setupvalue (lua_State *L, int funcindex, int n);</pre>
+
+<p>
+Sets the value of a closure's upvalue.
+It assigns the value at the top of the stack
+to the upvalue and returns its name.
+It also pops the value from the stack.
+
+
+<p>
+Returns <code>NULL</code> (and pops nothing)
+when the index <code>n</code> is greater than the number of upvalues.
+
+
+<p>
+Parameters <code>funcindex</code> and <code>n</code> are as in function <a href="#lua_getupvalue"><code>lua_getupvalue</code></a>.
+
+
+
+
+
+<hr><h3><a name="lua_upvalueid"><code>lua_upvalueid</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void *lua_upvalueid (lua_State *L, int funcindex, int n);</pre>
+
+<p>
+Returns a unique identifier for the upvalue numbered <code>n</code>
+from the closure at index <code>funcindex</code>.
+
+
+<p>
+These unique identifiers allow a program to check whether different
+closures share upvalues.
+Lua closures that share an upvalue
+(that is, that access the same external local variable)
+will return identical ids for those upvalue indices.
+
+
+<p>
+Parameters <code>funcindex</code> and <code>n</code> are as in function <a href="#lua_getupvalue"><code>lua_getupvalue</code></a>,
+but <code>n</code> cannot be greater than the number of upvalues.
+
+
+
+
+
+<hr><h3><a name="lua_upvaluejoin"><code>lua_upvaluejoin</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void lua_upvaluejoin (lua_State *L, int funcindex1, int n1,
+ int funcindex2, int n2);</pre>
+
+<p>
+Make the <code>n1</code>-th upvalue of the Lua closure at index <code>funcindex1</code>
+refer to the <code>n2</code>-th upvalue of the Lua closure at index <code>funcindex2</code>.
+
+
+
+
+
+
+
+<h1>5 &ndash; <a name="5">The Auxiliary Library</a></h1>
+
+<p>
+
+The <em>auxiliary library</em> provides several convenient functions
+to interface C with Lua.
+While the basic API provides the primitive functions for all
+interactions between C and Lua,
+the auxiliary library provides higher-level functions for some
+common tasks.
+
+
+<p>
+All functions and types from the auxiliary library
+are defined in header file <code>lauxlib.h</code> and
+have a prefix <code>luaL_</code>.
+
+
+<p>
+All functions in the auxiliary library are built on
+top of the basic API,
+and so they provide nothing that cannot be done with that API.
+Nevertheless, the use of the auxiliary library ensures
+more consistency to your code.
+
+
+<p>
+Several functions in the auxiliary library use internally some
+extra stack slots.
+When a function in the auxiliary library uses fewer than five slots,
+it does not check the stack size;
+it simply assumes that there are enough slots.
+
+
+<p>
+Several functions in the auxiliary library are used to
+check C&nbsp;function arguments.
+Because the error message is formatted for arguments
+(e.g., "<code>bad argument #1</code>"),
+you should not use these functions for other stack values.
+
+
+<p>
+Functions called <code>luaL_check*</code>
+always raise an error if the check is not satisfied.
+
+
+
+<h2>5.1 &ndash; <a name="5.1">Functions and Types</a></h2>
+
+<p>
+Here we list all functions and types from the auxiliary library
+in alphabetical order.
+
+
+
+<hr><h3><a name="luaL_addchar"><code>luaL_addchar</code></a></h3><p>
+<span class="apii">[-?, +?, <em>m</em>]</span>
+<pre>void luaL_addchar (luaL_Buffer *B, char c);</pre>
+
+<p>
+Adds the byte <code>c</code> to the buffer <code>B</code>
+(see <a href="#luaL_Buffer"><code>luaL_Buffer</code></a>).
+
+
+
+
+
+<hr><h3><a name="luaL_addlstring"><code>luaL_addlstring</code></a></h3><p>
+<span class="apii">[-?, +?, <em>m</em>]</span>
+<pre>void luaL_addlstring (luaL_Buffer *B, const char *s, size_t l);</pre>
+
+<p>
+Adds the string pointed to by <code>s</code> with length <code>l</code> to
+the buffer <code>B</code>
+(see <a href="#luaL_Buffer"><code>luaL_Buffer</code></a>).
+The string can contain embedded zeros.
+
+
+
+
+
+<hr><h3><a name="luaL_addsize"><code>luaL_addsize</code></a></h3><p>
+<span class="apii">[-?, +?, &ndash;]</span>
+<pre>void luaL_addsize (luaL_Buffer *B, size_t n);</pre>
+
+<p>
+Adds to the buffer <code>B</code> (see <a href="#luaL_Buffer"><code>luaL_Buffer</code></a>)
+a string of length <code>n</code> previously copied to the
+buffer area (see <a href="#luaL_prepbuffer"><code>luaL_prepbuffer</code></a>).
+
+
+
+
+
+<hr><h3><a name="luaL_addstring"><code>luaL_addstring</code></a></h3><p>
+<span class="apii">[-?, +?, <em>m</em>]</span>
+<pre>void luaL_addstring (luaL_Buffer *B, const char *s);</pre>
+
+<p>
+Adds the zero-terminated string pointed to by <code>s</code>
+to the buffer <code>B</code>
+(see <a href="#luaL_Buffer"><code>luaL_Buffer</code></a>).
+
+
+
+
+
+<hr><h3><a name="luaL_addvalue"><code>luaL_addvalue</code></a></h3><p>
+<span class="apii">[-1, +?, <em>m</em>]</span>
+<pre>void luaL_addvalue (luaL_Buffer *B);</pre>
+
+<p>
+Adds the value at the top of the stack
+to the buffer <code>B</code>
+(see <a href="#luaL_Buffer"><code>luaL_Buffer</code></a>).
+Pops the value.
+
+
+<p>
+This is the only function on string buffers that can (and must)
+be called with an extra element on the stack,
+which is the value to be added to the buffer.
+
+
+
+
+
+<hr><h3><a name="luaL_argcheck"><code>luaL_argcheck</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>void luaL_argcheck (lua_State *L,
+ int cond,
+ int arg,
+ const char *extramsg);</pre>
+
+<p>
+Checks whether <code>cond</code> is true.
+If it is not, raises an error with a standard message (see <a href="#luaL_argerror"><code>luaL_argerror</code></a>).
+
+
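+<p>
+For example, a sketch validating a positive size argument
+(the function <code>l_newbuffer</code> is hypothetical):
+
+<pre>
+ static int l_newbuffer (lua_State *L) {
+   lua_Integer sz = luaL_checkinteger(L, 1);
+   luaL_argcheck(L, sz &gt; 0, 1, "positive size expected");
+   lua_newuserdata(L, (size_t)sz);     /* create a buffer of that size */
+   return 1;
+ }
+</pre>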
+
+
+
+<hr><h3><a name="luaL_argerror"><code>luaL_argerror</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>int luaL_argerror (lua_State *L, int arg, const char *extramsg);</pre>
+
+<p>
+Raises an error reporting a problem with argument <code>arg</code>
+of the C&nbsp;function that called it,
+using a standard message
+that includes <code>extramsg</code> as a comment:
+
+<pre>
+ bad argument #<em>arg</em> to '<em>funcname</em>' (<em>extramsg</em>)
+</pre><p>
+This function never returns.
+
+
+
+
+
+<hr><h3><a name="luaL_Buffer"><code>luaL_Buffer</code></a></h3>
+<pre>typedef struct luaL_Buffer luaL_Buffer;</pre>
+
+<p>
+Type for a <em>string buffer</em>.
+
+
+<p>
+A string buffer allows C&nbsp;code to build Lua strings piecemeal.
+Its pattern of use is as follows:
+
+<ul>
+
+<li>First declare a variable <code>b</code> of type <a href="#luaL_Buffer"><code>luaL_Buffer</code></a>.</li>
+
+<li>Then initialize it with a call <code>luaL_buffinit(L, &amp;b)</code>.</li>
+
+<li>
+Then add string pieces to the buffer calling any of
+the <code>luaL_add*</code> functions.
+</li>
+
+<li>
+Finish by calling <code>luaL_pushresult(&amp;b)</code>.
+This call leaves the final string on the top of the stack.
+</li>
+
+</ul>
+
+<p>
+If you know beforehand the total size of the resulting string,
+you can use the buffer like this:
+
+<ul>
+
+<li>First declare a variable <code>b</code> of type <a href="#luaL_Buffer"><code>luaL_Buffer</code></a>.</li>
+
+<li>Then initialize it and preallocate a space of
+size <code>sz</code> with a call <code>luaL_buffinitsize(L, &amp;b, sz)</code>.</li>
+
+<li>Then copy the string into that space.</li>
+
+<li>
+Finish by calling <code>luaL_pushresultsize(&amp;b, sz)</code>,
+where <code>sz</code> is the total size of the resulting string
+copied into that space.
+</li>
+
+</ul>
+
+<p>
+During its normal operation,
+a string buffer uses a variable number of stack slots.
+So, while using a buffer, you cannot assume that you know where
+the top of the stack is.
+You can use the stack between successive calls to buffer operations
+as long as that use is balanced;
+that is,
+when you call a buffer operation,
+the stack is at the same level
+it was immediately after the previous buffer operation.
+(The only exception to this rule is <a href="#luaL_addvalue"><code>luaL_addvalue</code></a>.)
+After calling <a href="#luaL_pushresult"><code>luaL_pushresult</code></a> the stack is back to its
+level when the buffer was initialized,
+plus the final string on its top.
+
+
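+<p>
+A short sketch of the first pattern:
+
+<pre>
+ luaL_Buffer b;
+ luaL_buffinit(L, &amp;b);
+ luaL_addstring(&amp;b, "hello");
+ luaL_addchar(&amp;b, ' ');
+ luaL_addstring(&amp;b, "world");
+ luaL_pushresult(&amp;b);          /* the string "hello world" is now on the top */
+</pre>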
+
+
+
+<hr><h3><a name="luaL_buffinit"><code>luaL_buffinit</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void luaL_buffinit (lua_State *L, luaL_Buffer *B);</pre>
+
+<p>
+Initializes a buffer <code>B</code>.
+This function does not allocate any space;
+the buffer must be declared as a variable
+(see <a href="#luaL_Buffer"><code>luaL_Buffer</code></a>).
+
+
+
+
+
+<hr><h3><a name="luaL_buffinitsize"><code>luaL_buffinitsize</code></a></h3><p>
+<span class="apii">[-?, +?, <em>m</em>]</span>
+<pre>char *luaL_buffinitsize (lua_State *L, luaL_Buffer *B, size_t sz);</pre>
+
+<p>
+Equivalent to the sequence
+<a href="#luaL_buffinit"><code>luaL_buffinit</code></a>, <a href="#luaL_prepbuffsize"><code>luaL_prepbuffsize</code></a>.
+
+
+
+
+
+<hr><h3><a name="luaL_callmeta"><code>luaL_callmeta</code></a></h3><p>
+<span class="apii">[-0, +(0|1), <em>e</em>]</span>
+<pre>int luaL_callmeta (lua_State *L, int obj, const char *e);</pre>
+
+<p>
+Calls a metamethod.
+
+
+<p>
+If the object at index <code>obj</code> has a metatable and this
+metatable has a field <code>e</code>,
+this function calls this field passing the object as its only argument.
+In this case this function returns true and pushes onto the
+stack the value returned by the call.
+If there is no metatable or no metamethod,
+this function returns false (without pushing any value on the stack).
+
+
+
+
+
+<hr><h3><a name="luaL_checkany"><code>luaL_checkany</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>void luaL_checkany (lua_State *L, int arg);</pre>
+
+<p>
+Checks whether the function has an argument
+of any type (including <b>nil</b>) at position <code>arg</code>.
+
+
+
+
+
+<hr><h3><a name="luaL_checkinteger"><code>luaL_checkinteger</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>lua_Integer luaL_checkinteger (lua_State *L, int arg);</pre>
+
+<p>
+Checks whether the function argument <code>arg</code> is an integer
+(or can be converted to an integer)
+and returns this integer cast to a <a href="#lua_Integer"><code>lua_Integer</code></a>.
+
+
+
+
+
+<hr><h3><a name="luaL_checklstring"><code>luaL_checklstring</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>const char *luaL_checklstring (lua_State *L, int arg, size_t *l);</pre>
+
+<p>
+Checks whether the function argument <code>arg</code> is a string
+and returns this string;
+if <code>l</code> is not <code>NULL</code> fills <code>*l</code>
+with the string's length.
+
+
+<p>
+This function uses <a href="#lua_tolstring"><code>lua_tolstring</code></a> to get its result,
+so all conversions and caveats of that function apply here.
+
+
+
+
+
+<hr><h3><a name="luaL_checknumber"><code>luaL_checknumber</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>lua_Number luaL_checknumber (lua_State *L, int arg);</pre>
+
+<p>
+Checks whether the function argument <code>arg</code> is a number
+and returns this number.
+
+
+
+
+
+<hr><h3><a name="luaL_checkoption"><code>luaL_checkoption</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>int luaL_checkoption (lua_State *L,
+ int arg,
+ const char *def,
+ const char *const lst[]);</pre>
+
+<p>
+Checks whether the function argument <code>arg</code> is a string and
+searches for this string in the array <code>lst</code>
+(which must be NULL-terminated).
+Returns the index in the array where the string was found.
+Raises an error if the argument is not a string or
+if the string cannot be found.
+
+
+<p>
+If <code>def</code> is not <code>NULL</code>,
+the function uses <code>def</code> as a default value when
+there is no argument <code>arg</code> or when this argument is <b>nil</b>.
+
+
+<p>
+This is a useful function for mapping strings to C&nbsp;enums.
+(The usual convention in Lua libraries is
+to use strings instead of numbers to select options.)
+
+
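+<p>
+For instance, a sketch mapping a mode argument to an index
+(the option list and function name are arbitrary):
+
+<pre>
+ static const char *const modes[] = {"read", "write", "append", NULL};
+
+ static int l_setmode (lua_State *L) {
+   int mode = luaL_checkoption(L, 1, "read", modes);
+   lua_pushinteger(L, mode);     /* 0, 1, or 2 */
+   return 1;
+ }
+</pre>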
+
+
+
+<hr><h3><a name="luaL_checkstack"><code>luaL_checkstack</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>void luaL_checkstack (lua_State *L, int sz, const char *msg);</pre>
+
+<p>
+Grows the stack size to <code>top + sz</code> elements,
+raising an error if the stack cannot grow to that size.
+<code>msg</code> is an additional text to go into the error message
+(or <code>NULL</code> for no additional text).
+
+
+
+
+
+<hr><h3><a name="luaL_checkstring"><code>luaL_checkstring</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>const char *luaL_checkstring (lua_State *L, int arg);</pre>
+
+<p>
+Checks whether the function argument <code>arg</code> is a string
+and returns this string.
+
+
+<p>
+This function uses <a href="#lua_tolstring"><code>lua_tolstring</code></a> to get its result,
+so all conversions and caveats of that function apply here.
+
+
+
+
+
+<hr><h3><a name="luaL_checktype"><code>luaL_checktype</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>void luaL_checktype (lua_State *L, int arg, int t);</pre>
+
+<p>
+Checks whether the function argument <code>arg</code> has type <code>t</code>.
+See <a href="#lua_type"><code>lua_type</code></a> for the encoding of types for <code>t</code>.
+
+
+
+
+
+<hr><h3><a name="luaL_checkudata"><code>luaL_checkudata</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>void *luaL_checkudata (lua_State *L, int arg, const char *tname);</pre>
+
+<p>
+Checks whether the function argument <code>arg</code> is a userdata
+of the type <code>tname</code> (see <a href="#luaL_newmetatable"><code>luaL_newmetatable</code></a>) and
+returns the userdata address (see <a href="#lua_touserdata"><code>lua_touserdata</code></a>).
+
+
+
+
+
+<hr><h3><a name="luaL_checkversion"><code>luaL_checkversion</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>void luaL_checkversion (lua_State *L);</pre>
+
+<p>
+Checks whether the core running the call,
+the core that created the Lua state,
+and the code making the call are all using the same version of Lua.
+Also checks whether the core running the call
+and the core that created the Lua state
+are using the same address space.
+
+
+
+
+
+<hr><h3><a name="luaL_dofile"><code>luaL_dofile</code></a></h3><p>
+<span class="apii">[-0, +?, <em>e</em>]</span>
+<pre>int luaL_dofile (lua_State *L, const char *filename);</pre>
+
+<p>
+Loads and runs the given file.
+It is defined as the following macro:
+
+<pre>
+ (luaL_loadfile(L, filename) || lua_pcall(L, 0, LUA_MULTRET, 0))
+</pre><p>
+It returns false if there are no errors
+or true in case of errors.
+
+
+
+
+
+<hr><h3><a name="luaL_dostring"><code>luaL_dostring</code></a></h3><p>
+<span class="apii">[-0, +?, &ndash;]</span>
+<pre>int luaL_dostring (lua_State *L, const char *str);</pre>
+
+<p>
+Loads and runs the given string.
+It is defined as the following macro:
+
+<pre>
+ (luaL_loadstring(L, str) || lua_pcall(L, 0, LUA_MULTRET, 0))
+</pre><p>
+It returns false if there are no errors
+or true in case of errors.
+
+
+
+
+
+<hr><h3><a name="luaL_error"><code>luaL_error</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>int luaL_error (lua_State *L, const char *fmt, ...);</pre>
+
+<p>
+Raises an error.
+The error message format is given by <code>fmt</code>
+plus any extra arguments,
+following the same rules of <a href="#lua_pushfstring"><code>lua_pushfstring</code></a>.
+It also adds at the beginning of the message the file name and
+the line number where the error occurred,
+if this information is available.
+
+
+<p>
+This function never returns,
+but it is an idiom to use it in C&nbsp;functions
+as <code>return luaL_error(<em>args</em>)</code>.
+
+
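+<p>
+For instance, a sketch where <code>n</code> is some <code>int</code> computed by the caller:
+
+<pre>
+ if (n &lt; 0)
+   return luaL_error(L, "invalid length %d (must be non-negative)", n);
+</pre>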
+
+
+
+<hr><h3><a name="luaL_execresult"><code>luaL_execresult</code></a></h3><p>
+<span class="apii">[-0, +3, <em>m</em>]</span>
+<pre>int luaL_execresult (lua_State *L, int stat);</pre>
+
+<p>
+This function produces the return values for
+process-related functions in the standard library
+(<a href="#pdf-os.execute"><code>os.execute</code></a> and <a href="#pdf-io.close"><code>io.close</code></a>).
+
+
+
+
+
+<hr><h3><a name="luaL_fileresult"><code>luaL_fileresult</code></a></h3><p>
+<span class="apii">[-0, +(1|3), <em>m</em>]</span>
+<pre>int luaL_fileresult (lua_State *L, int stat, const char *fname);</pre>
+
+<p>
+This function produces the return values for
+file-related functions in the standard library
+(<a href="#pdf-io.open"><code>io.open</code></a>, <a href="#pdf-os.rename"><code>os.rename</code></a>, <a href="#pdf-file:seek"><code>file:seek</code></a>, etc.).
+
+
+
+
+
+<hr><h3><a name="luaL_getmetafield"><code>luaL_getmetafield</code></a></h3><p>
+<span class="apii">[-0, +(0|1), <em>m</em>]</span>
+<pre>int luaL_getmetafield (lua_State *L, int obj, const char *e);</pre>
+
+<p>
+Pushes onto the stack the field <code>e</code> from the metatable
+of the object at index <code>obj</code> and returns the type of pushed value.
+If the object does not have a metatable,
+or if the metatable does not have this field,
+pushes nothing and returns <code>LUA_TNIL</code>.
+
+
+
+
+
+<hr><h3><a name="luaL_getmetatable"><code>luaL_getmetatable</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>int luaL_getmetatable (lua_State *L, const char *tname);</pre>
+
+<p>
+Pushes onto the stack the metatable associated with name <code>tname</code>
+in the registry (see <a href="#luaL_newmetatable"><code>luaL_newmetatable</code></a>)
+(<b>nil</b> if there is no metatable associated with that name).
+Returns the type of the pushed value.
+
+
+
+
+
+<hr><h3><a name="luaL_getsubtable"><code>luaL_getsubtable</code></a></h3><p>
+<span class="apii">[-0, +1, <em>e</em>]</span>
+<pre>int luaL_getsubtable (lua_State *L, int idx, const char *fname);</pre>
+
+<p>
+Ensures that the value <code>t[fname]</code>,
+where <code>t</code> is the value at index <code>idx</code>,
+is a table,
+and pushes that table onto the stack.
+Returns true if it finds a previous table there
+and false if it creates a new table.
+
+
+
+
+
+<hr><h3><a name="luaL_gsub"><code>luaL_gsub</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>const char *luaL_gsub (lua_State *L,
+ const char *s,
+ const char *p,
+ const char *r);</pre>
+
+<p>
+Creates a copy of string <code>s</code> by replacing
+any occurrence of the string <code>p</code>
+with the string <code>r</code>.
+Pushes the resulting string on the stack and returns it.
+
+
+
+
+
+<hr><h3><a name="luaL_len"><code>luaL_len</code></a></h3><p>
+<span class="apii">[-0, +0, <em>e</em>]</span>
+<pre>lua_Integer luaL_len (lua_State *L, int index);</pre>
+
+<p>
+Returns the "length" of the value at the given index
+as a number;
+it is equivalent to the '<code>#</code>' operator in Lua (see <a href="#3.4.7">&sect;3.4.7</a>).
+Raises an error if the result of the operation is not an integer.
+(This case can only happen through metamethods.)
+
+
+
+
+
+<hr><h3><a name="luaL_loadbuffer"><code>luaL_loadbuffer</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>int luaL_loadbuffer (lua_State *L,
+ const char *buff,
+ size_t sz,
+ const char *name);</pre>
+
+<p>
+Equivalent to <a href="#luaL_loadbufferx"><code>luaL_loadbufferx</code></a> with <code>mode</code> equal to <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="luaL_loadbufferx"><code>luaL_loadbufferx</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>int luaL_loadbufferx (lua_State *L,
+ const char *buff,
+ size_t sz,
+ const char *name,
+ const char *mode);</pre>
+
+<p>
+Loads a buffer as a Lua chunk.
+This function uses <a href="#lua_load"><code>lua_load</code></a> to load the chunk in the
+buffer pointed to by <code>buff</code> with size <code>sz</code>.
+
+
+<p>
+This function returns the same results as <a href="#lua_load"><code>lua_load</code></a>.
+<code>name</code> is the chunk name,
+used for debug information and error messages.
+The string <code>mode</code> works as in function <a href="#lua_load"><code>lua_load</code></a>.
+
+
+
+
+
+<hr><h3><a name="luaL_loadfile"><code>luaL_loadfile</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>int luaL_loadfile (lua_State *L, const char *filename);</pre>
+
+<p>
+Equivalent to <a href="#luaL_loadfilex"><code>luaL_loadfilex</code></a> with <code>mode</code> equal to <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="luaL_loadfilex"><code>luaL_loadfilex</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>int luaL_loadfilex (lua_State *L, const char *filename,
+                    const char *mode);</pre>
+
+<p>
+Loads a file as a Lua chunk.
+This function uses <a href="#lua_load"><code>lua_load</code></a> to load the chunk in the file
+named <code>filename</code>.
+If <code>filename</code> is <code>NULL</code>,
+then it loads from the standard input.
+The first line in the file is ignored if it starts with a <code>#</code>.
+
+
+<p>
+The string <code>mode</code> works as in function <a href="#lua_load"><code>lua_load</code></a>.
+
+
+<p>
+This function returns the same results as <a href="#lua_load"><code>lua_load</code></a>,
+but it has an extra error code <a name="pdf-LUA_ERRFILE"><code>LUA_ERRFILE</code></a>
+for file-related errors
+(e.g., it cannot open or read the file).
+
+
+<p>
+As <a href="#lua_load"><code>lua_load</code></a>, this function only loads the chunk;
+it does not run it.
+
+
+
+
+
+<hr><h3><a name="luaL_loadstring"><code>luaL_loadstring</code></a></h3><p>
+<span class="apii">[-0, +1, &ndash;]</span>
+<pre>int luaL_loadstring (lua_State *L, const char *s);</pre>
+
+<p>
+Loads a string as a Lua chunk.
+This function uses <a href="#lua_load"><code>lua_load</code></a> to load the chunk in
+the zero-terminated string <code>s</code>.
+
+
+<p>
+This function returns the same results as <a href="#lua_load"><code>lua_load</code></a>.
+
+
+<p>
+Also as <a href="#lua_load"><code>lua_load</code></a>, this function only loads the chunk;
+it does not run it.
+
+
+
+
+
+<hr><h3><a name="luaL_newlib"><code>luaL_newlib</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>void luaL_newlib (lua_State *L, const luaL_Reg l[]);</pre>
+
+<p>
+Creates a new table and registers there
+the functions in list <code>l</code>.
+
+
+<p>
+It is implemented as the following macro:
+
+<pre>
+ (luaL_newlibtable(L,l), luaL_setfuncs(L,l,0))
+</pre><p>
+The array <code>l</code> must be the actual array,
+not a pointer to it.
+
+
+
+
+
+<hr><h3><a name="luaL_newlibtable"><code>luaL_newlibtable</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>void luaL_newlibtable (lua_State *L, const luaL_Reg l[]);</pre>
+
+<p>
+Creates a new table with a size optimized
+to store all entries in the array <code>l</code>
+(but does not actually store them).
+It is intended to be used in conjunction with <a href="#luaL_setfuncs"><code>luaL_setfuncs</code></a>
+(see <a href="#luaL_newlib"><code>luaL_newlib</code></a>).
+
+
+<p>
+It is implemented as a macro.
+The array <code>l</code> must be the actual array,
+not a pointer to it.
+
+
+
+
+
+<hr><h3><a name="luaL_newmetatable"><code>luaL_newmetatable</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>int luaL_newmetatable (lua_State *L, const char *tname);</pre>
+
+<p>
+If the registry already has the key <code>tname</code>,
+returns 0.
+Otherwise,
+creates a new table to be used as a metatable for userdata,
+adds to this new table the pair <code>__name = tname</code>,
+adds to the registry the pair <code>[tname] = new table</code>,
+and returns 1.
+(The entry <code>__name</code> is used by some error-reporting functions.)
+
+
+<p>
+In both cases pushes onto the stack the final value associated
+with <code>tname</code> in the registry.
+
+
+
+
+
+<hr><h3><a name="luaL_newstate"><code>luaL_newstate</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>lua_State *luaL_newstate (void);</pre>
+
+<p>
+Creates a new Lua state.
+It calls <a href="#lua_newstate"><code>lua_newstate</code></a> with an
+allocator based on the standard&nbsp;C <code>realloc</code> function
+and then sets a panic function (see <a href="#4.6">&sect;4.6</a>) that prints
+an error message to the standard error output in case of fatal
+errors.
+
+
+<p>
+Returns the new state,
+or <code>NULL</code> if there is a memory allocation error.
+
+
+
+
+
+<hr><h3><a name="luaL_openlibs"><code>luaL_openlibs</code></a></h3><p>
+<span class="apii">[-0, +0, <em>e</em>]</span>
+<pre>void luaL_openlibs (lua_State *L);</pre>
+
+<p>
+Opens all standard Lua libraries into the given state.
+
+
+
+
+
+<hr><h3><a name="luaL_opt"><code>luaL_opt</code></a></h3><p>
+<span class="apii">[-0, +0, <em>e</em>]</span>
+<pre>T luaL_opt (L, func, arg, dflt);</pre>
+
+<p>
+This macro is defined as follows:
+
+<pre>
+ (lua_isnoneornil(L,(arg)) ? (dflt) : func(L,(arg)))
+</pre><p>
+In words, if the argument <code>arg</code> is nil or absent,
+the macro results in the default <code>dflt</code>.
+Otherwise, it results in the result of calling <code>func</code>
+with the state <code>L</code> and the argument index <code>arg</code> as
+parameters.
+Note that it evaluates the expression <code>dflt</code> only if needed.
+
+
+
+
+
+<hr><h3><a name="luaL_optinteger"><code>luaL_optinteger</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>lua_Integer luaL_optinteger (lua_State *L,
+                             int arg,
+                             lua_Integer d);</pre>
+
+<p>
+If the function argument <code>arg</code> is an integer
+(or convertible to an integer),
+returns this integer.
+If this argument is absent or is <b>nil</b>,
+returns <code>d</code>.
+Otherwise, raises an error.
+
+
+
+
+
+<hr><h3><a name="luaL_optlstring"><code>luaL_optlstring</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>const char *luaL_optlstring (lua_State *L,
+                             int arg,
+                             const char *d,
+                             size_t *l);</pre>
+
+<p>
+If the function argument <code>arg</code> is a string,
+returns this string.
+If this argument is absent or is <b>nil</b>,
+returns <code>d</code>.
+Otherwise, raises an error.
+
+
+<p>
+If <code>l</code> is not <code>NULL</code>,
+fills the position <code>*l</code> with the result's length.
+If the result is <code>NULL</code>
+(only possible when returning <code>d</code> and <code>d == NULL</code>),
+its length is considered zero.
+
+
+<p>
+This function uses <a href="#lua_tolstring"><code>lua_tolstring</code></a> to get its result,
+so all conversions and caveats of that function apply here.
+
+
+
+
+
+<hr><h3><a name="luaL_optnumber"><code>luaL_optnumber</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>lua_Number luaL_optnumber (lua_State *L, int arg, lua_Number d);</pre>
+
+<p>
+If the function argument <code>arg</code> is a number,
+returns this number.
+If this argument is absent or is <b>nil</b>,
+returns <code>d</code>.
+Otherwise, raises an error.
+
+
+
+
+
+<hr><h3><a name="luaL_optstring"><code>luaL_optstring</code></a></h3><p>
+<span class="apii">[-0, +0, <em>v</em>]</span>
+<pre>const char *luaL_optstring (lua_State *L,
+                            int arg,
+                            const char *d);</pre>
+
+<p>
+If the function argument <code>arg</code> is a string,
+returns this string.
+If this argument is absent or is <b>nil</b>,
+returns <code>d</code>.
+Otherwise, raises an error.
+
+
+
+
+
+<hr><h3><a name="luaL_prepbuffer"><code>luaL_prepbuffer</code></a></h3><p>
+<span class="apii">[-?, +?, <em>m</em>]</span>
+<pre>char *luaL_prepbuffer (luaL_Buffer *B);</pre>
+
+<p>
+Equivalent to <a href="#luaL_prepbuffsize"><code>luaL_prepbuffsize</code></a>
+with the predefined size <a name="pdf-LUAL_BUFFERSIZE"><code>LUAL_BUFFERSIZE</code></a>.
+
+
+
+
+
+<hr><h3><a name="luaL_prepbuffsize"><code>luaL_prepbuffsize</code></a></h3><p>
+<span class="apii">[-?, +?, <em>m</em>]</span>
+<pre>char *luaL_prepbuffsize (luaL_Buffer *B, size_t sz);</pre>
+
+<p>
+Returns an address to a space of size <code>sz</code>
+where you can copy a string to be added to buffer <code>B</code>
+(see <a href="#luaL_Buffer"><code>luaL_Buffer</code></a>).
+After copying the string into this space you must call
+<a href="#luaL_addsize"><code>luaL_addsize</code></a> with the size of the string to actually add
+it to the buffer.
+
+
+
+
+
+<hr><h3><a name="luaL_pushresult"><code>luaL_pushresult</code></a></h3><p>
+<span class="apii">[-?, +1, <em>m</em>]</span>
+<pre>void luaL_pushresult (luaL_Buffer *B);</pre>
+
+<p>
+Finishes the use of buffer <code>B</code> leaving the final string on
+the top of the stack.
+
+
+
+
+
+<hr><h3><a name="luaL_pushresultsize"><code>luaL_pushresultsize</code></a></h3><p>
+<span class="apii">[-?, +1, <em>m</em>]</span>
+<pre>void luaL_pushresultsize (luaL_Buffer *B, size_t sz);</pre>
+
+<p>
+Equivalent to the sequence <a href="#luaL_addsize"><code>luaL_addsize</code></a>, <a href="#luaL_pushresult"><code>luaL_pushresult</code></a>.
+
+
+
+
+
+<hr><h3><a name="luaL_ref"><code>luaL_ref</code></a></h3><p>
+<span class="apii">[-1, +0, <em>m</em>]</span>
+<pre>int luaL_ref (lua_State *L, int t);</pre>
+
+<p>
+Creates and returns a <em>reference</em>,
+in the table at index <code>t</code>,
+for the object at the top of the stack (and pops the object).
+
+
+<p>
+A reference is a unique integer key.
+As long as you do not manually add integer keys into table <code>t</code>,
+<a href="#luaL_ref"><code>luaL_ref</code></a> ensures the uniqueness of the key it returns.
+You can retrieve an object referred by reference <code>r</code>
+by calling <code>lua_rawgeti(L, t, r)</code>.
+Function <a href="#luaL_unref"><code>luaL_unref</code></a> frees a reference and its associated object.
+
+
+<p>
+If the object at the top of the stack is <b>nil</b>,
+<a href="#luaL_ref"><code>luaL_ref</code></a> returns the constant <a name="pdf-LUA_REFNIL"><code>LUA_REFNIL</code></a>.
+The constant <a name="pdf-LUA_NOREF"><code>LUA_NOREF</code></a> is guaranteed to be different
+from any reference returned by <a href="#luaL_ref"><code>luaL_ref</code></a>.
+
+
+
+
+
+<hr><h3><a name="luaL_Reg"><code>luaL_Reg</code></a></h3>
+<pre>typedef struct luaL_Reg {
+  const char *name;
+  lua_CFunction func;
+} luaL_Reg;</pre>
+
+<p>
+Type for arrays of functions to be registered by
+<a href="#luaL_setfuncs"><code>luaL_setfuncs</code></a>.
+<code>name</code> is the function name and <code>func</code> is a pointer to
+the function.
+Any array of <a href="#luaL_Reg"><code>luaL_Reg</code></a> must end with a sentinel entry
+in which both <code>name</code> and <code>func</code> are <code>NULL</code>.
+
+
+
+
+
+<hr><h3><a name="luaL_requiref"><code>luaL_requiref</code></a></h3><p>
+<span class="apii">[-0, +1, <em>e</em>]</span>
+<pre>void luaL_requiref (lua_State *L, const char *modname,
+                    lua_CFunction openf, int glb);</pre>
+
+<p>
+If <code>modname</code> is not already present in <a href="#pdf-package.loaded"><code>package.loaded</code></a>,
+calls function <code>openf</code> with string <code>modname</code> as an argument
+and sets the call result in <code>package.loaded[modname]</code>,
+as if that function had been called through <a href="#pdf-require"><code>require</code></a>.
+
+
+<p>
+If <code>glb</code> is true,
+also stores the module into global <code>modname</code>.
+
+
+<p>
+Leaves a copy of the module on the stack.
+
+
+
+
+
+<hr><h3><a name="luaL_setfuncs"><code>luaL_setfuncs</code></a></h3><p>
+<span class="apii">[-nup, +0, <em>m</em>]</span>
+<pre>void luaL_setfuncs (lua_State *L, const luaL_Reg *l, int nup);</pre>
+
+<p>
+Registers all functions in the array <code>l</code>
+(see <a href="#luaL_Reg"><code>luaL_Reg</code></a>) into the table on the top of the stack
+(below optional upvalues, see next).
+
+
+<p>
+When <code>nup</code> is not zero,
+all functions are created sharing <code>nup</code> upvalues,
+which must be previously pushed on the stack
+on top of the library table.
+These values are popped from the stack after the registration.
+
+
+
+
+
+<hr><h3><a name="luaL_setmetatable"><code>luaL_setmetatable</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void luaL_setmetatable (lua_State *L, const char *tname);</pre>
+
+<p>
+Sets the metatable of the object at the top of the stack
+as the metatable associated with name <code>tname</code>
+in the registry (see <a href="#luaL_newmetatable"><code>luaL_newmetatable</code></a>).
+
+
+
+
+
+<hr><h3><a name="luaL_Stream"><code>luaL_Stream</code></a></h3>
+<pre>typedef struct luaL_Stream {
+  FILE *f;
+  lua_CFunction closef;
+} luaL_Stream;</pre>
+
+<p>
+The standard representation for file handles,
+which is used by the standard I/O library.
+
+
+<p>
+A file handle is implemented as a full userdata,
+with a metatable called <code>LUA_FILEHANDLE</code>
+(where <code>LUA_FILEHANDLE</code> is a macro with the actual metatable's name).
+The metatable is created by the I/O library
+(see <a href="#luaL_newmetatable"><code>luaL_newmetatable</code></a>).
+
+
+<p>
+This userdata must start with the structure <code>luaL_Stream</code>;
+it can contain other data after this initial structure.
+Field <code>f</code> points to the corresponding C stream
+(or it can be <code>NULL</code> to indicate an incompletely created handle).
+Field <code>closef</code> points to a Lua function
+that will be called to close the stream
+when the handle is closed or collected;
+this function receives the file handle as its sole argument and
+must return either <b>true</b> (in case of success)
+or <b>nil</b> plus an error message (in case of error).
+Once Lua calls this field,
+it changes the field value to <code>NULL</code>
+to signal that the handle is closed.
+
+
+
+
+
+<hr><h3><a name="luaL_testudata"><code>luaL_testudata</code></a></h3><p>
+<span class="apii">[-0, +0, <em>m</em>]</span>
+<pre>void *luaL_testudata (lua_State *L, int arg, const char *tname);</pre>
+
+<p>
+This function works like <a href="#luaL_checkudata"><code>luaL_checkudata</code></a>,
+except that, when the test fails,
+it returns <code>NULL</code> instead of raising an error.
+
+
+
+
+
+<hr><h3><a name="luaL_tolstring"><code>luaL_tolstring</code></a></h3><p>
+<span class="apii">[-0, +1, <em>e</em>]</span>
+<pre>const char *luaL_tolstring (lua_State *L, int idx, size_t *len);</pre>
+
+<p>
+Converts any Lua value at the given index to a C&nbsp;string
+in a reasonable format.
+The resulting string is pushed onto the stack and also
+returned by the function.
+If <code>len</code> is not <code>NULL</code>,
+the function also sets <code>*len</code> with the string length.
+
+
+<p>
+If the value has a metatable with a <code>__tostring</code> field,
+then <code>luaL_tolstring</code> calls the corresponding metamethod
+with the value as argument,
+and uses the result of the call as its result.
+
+
+
+
+
+<hr><h3><a name="luaL_traceback"><code>luaL_traceback</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>void luaL_traceback (lua_State *L, lua_State *L1, const char *msg,
+                     int level);</pre>
+
+<p>
+Creates and pushes a traceback of the stack <code>L1</code>.
+If <code>msg</code> is not <code>NULL</code>, it is added
+at the beginning of the traceback.
+The <code>level</code> parameter tells at which level
+to start the traceback.
+
+
+
+
+
+<hr><h3><a name="luaL_typename"><code>luaL_typename</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>const char *luaL_typename (lua_State *L, int index);</pre>
+
+<p>
+Returns the name of the type of the value at the given index.
+
+
+
+
+
+<hr><h3><a name="luaL_unref"><code>luaL_unref</code></a></h3><p>
+<span class="apii">[-0, +0, &ndash;]</span>
+<pre>void luaL_unref (lua_State *L, int t, int ref);</pre>
+
+<p>
+Releases reference <code>ref</code> from the table at index <code>t</code>
+(see <a href="#luaL_ref"><code>luaL_ref</code></a>).
+The entry is removed from the table,
+so that the referred object can be collected.
+The reference <code>ref</code> is also freed to be used again.
+
+
+<p>
+If <code>ref</code> is <a href="#pdf-LUA_NOREF"><code>LUA_NOREF</code></a> or <a href="#pdf-LUA_REFNIL"><code>LUA_REFNIL</code></a>,
+<a href="#luaL_unref"><code>luaL_unref</code></a> does nothing.
+
+
+
+
+
+<hr><h3><a name="luaL_where"><code>luaL_where</code></a></h3><p>
+<span class="apii">[-0, +1, <em>m</em>]</span>
+<pre>void luaL_where (lua_State *L, int lvl);</pre>
+
+<p>
+Pushes onto the stack a string identifying the current position
+of the control at level <code>lvl</code> in the call stack.
+Typically this string has the following format:
+
+<pre>
+ <em>chunkname</em>:<em>currentline</em>:
+</pre><p>
+Level&nbsp;0 is the running function,
+level&nbsp;1 is the function that called the running function,
+etc.
+
+
+<p>
+This function is used to build a prefix for error messages.
+
+
+
+
+
+
+
+<h1>6 &ndash; <a name="6">Standard Libraries</a></h1>
+
+<p>
+The standard Lua libraries provide useful functions
+that are implemented directly through the C&nbsp;API.
+Some of these functions provide essential services to the language
+(e.g., <a href="#pdf-type"><code>type</code></a> and <a href="#pdf-getmetatable"><code>getmetatable</code></a>);
+others provide access to "outside" services (e.g., I/O);
+and others could be implemented in Lua itself,
+but are quite useful or have critical performance requirements that
+deserve an implementation in C (e.g., <a href="#pdf-table.sort"><code>table.sort</code></a>).
+
+
+<p>
+All libraries are implemented through the official C&nbsp;API
+and are provided as separate C&nbsp;modules.
+Currently, Lua has the following standard libraries:
+
+<ul>
+
+<li>basic library (<a href="#6.1">&sect;6.1</a>);</li>
+
+<li>coroutine library (<a href="#6.2">&sect;6.2</a>);</li>
+
+<li>package library (<a href="#6.3">&sect;6.3</a>);</li>
+
+<li>string manipulation (<a href="#6.4">&sect;6.4</a>);</li>
+
+<li>basic UTF-8 support (<a href="#6.5">&sect;6.5</a>);</li>
+
+<li>table manipulation (<a href="#6.6">&sect;6.6</a>);</li>
+
+<li>mathematical functions (<a href="#6.7">&sect;6.7</a>) (sin, log, etc.);</li>
+
+<li>input and output (<a href="#6.8">&sect;6.8</a>);</li>
+
+<li>operating system facilities (<a href="#6.9">&sect;6.9</a>);</li>
+
+<li>debug facilities (<a href="#6.10">&sect;6.10</a>).</li>
+
+</ul><p>
+Except for the basic and the package libraries,
+each library provides all its functions as fields of a global table
+or as methods of its objects.
+
+
+<p>
+To have access to these libraries,
+the C&nbsp;host program should call the <a href="#luaL_openlibs"><code>luaL_openlibs</code></a> function,
+which opens all standard libraries.
+Alternatively,
+the host program can open them individually by using
+<a href="#luaL_requiref"><code>luaL_requiref</code></a> to call
+<a name="pdf-luaopen_base"><code>luaopen_base</code></a> (for the basic library),
+<a name="pdf-luaopen_package"><code>luaopen_package</code></a> (for the package library),
+<a name="pdf-luaopen_coroutine"><code>luaopen_coroutine</code></a> (for the coroutine library),
+<a name="pdf-luaopen_string"><code>luaopen_string</code></a> (for the string library),
+<a name="pdf-luaopen_utf8"><code>luaopen_utf8</code></a> (for the UTF8 library),
+<a name="pdf-luaopen_table"><code>luaopen_table</code></a> (for the table library),
+<a name="pdf-luaopen_math"><code>luaopen_math</code></a> (for the mathematical library),
+<a name="pdf-luaopen_io"><code>luaopen_io</code></a> (for the I/O library),
+<a name="pdf-luaopen_os"><code>luaopen_os</code></a> (for the operating system library),
+and <a name="pdf-luaopen_debug"><code>luaopen_debug</code></a> (for the debug library).
+These functions are declared in <a name="pdf-lualib.h"><code>lualib.h</code></a>.
+
+
+
+<h2>6.1 &ndash; <a name="6.1">Basic Functions</a></h2>
+
+<p>
+The basic library provides core functions to Lua.
+If you do not include this library in your application,
+you should check carefully whether you need to provide
+implementations for some of its facilities.
+
+
+<p>
+<hr><h3><a name="pdf-assert"><code>assert (v [, message])</code></a></h3>
+
+
+<p>
+Calls <a href="#pdf-error"><code>error</code></a> if
+the value of its argument <code>v</code> is false (i.e., <b>nil</b> or <b>false</b>);
+otherwise, returns all its arguments.
+In case of error,
+<code>message</code> is the error object;
+when absent, it defaults to "<code>assertion failed!</code>".
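+
+
+<p>
+As an illustrative sketch (the variable names and the message here are arbitrary),
+<code>assert</code> is often used to check a result and attach a custom message:
+
+<pre>
+     local s = "10"
+     local n = assert(tonumber(s), "expected a numeric string")
+     print(n)     --&gt; 10
+     -- if s were not numeric, tonumber would return nil and
+     -- assert would raise "expected a numeric string"
+</pre>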
+
+
+
+
+<p>
+<hr><h3><a name="pdf-collectgarbage"><code>collectgarbage ([opt [, arg]])</code></a></h3>
+
+
+<p>
+This function is a generic interface to the garbage collector.
+It performs different functions according to its first argument, <code>opt</code>:
+
+<ul>
+
+<li><b>"<code>collect</code>": </b>
+performs a full garbage-collection cycle.
+This is the default option.
+</li>
+
+<li><b>"<code>stop</code>": </b>
+stops automatic execution of the garbage collector.
+The collector will run only when explicitly invoked,
+until a call to restart it.
+</li>
+
+<li><b>"<code>restart</code>": </b>
+restarts automatic execution of the garbage collector.
+</li>
+
+<li><b>"<code>count</code>": </b>
+returns the total memory in use by Lua in Kbytes.
+The value has a fractional part,
+so that multiplying it by 1024
+gives the exact number of bytes in use by Lua
+(except for overflows).
+</li>
+
+<li><b>"<code>step</code>": </b>
+performs a garbage-collection step.
+The step "size" is controlled by <code>arg</code>.
+With a zero value,
+the collector will perform one basic (indivisible) step.
+For non-zero values,
+the collector will act as if that amount of memory
+(in Kbytes) had been allocated by Lua.
+Returns <b>true</b> if the step finished a collection cycle.
+</li>
+
+<li><b>"<code>setpause</code>": </b>
+sets <code>arg</code> as the new value for the <em>pause</em> of
+the collector (see <a href="#2.5">&sect;2.5</a>).
+Returns the previous value for <em>pause</em>.
+</li>
+
+<li><b>"<code>setstepmul</code>": </b>
+sets <code>arg</code> as the new value for the <em>step multiplier</em> of
+the collector (see <a href="#2.5">&sect;2.5</a>).
+Returns the previous value for <em>step</em>.
+</li>
+
+<li><b>"<code>isrunning</code>": </b>
+returns a boolean that tells whether the collector is running
+(i.e., not stopped).
+</li>
+
+</ul>
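+
+
+<p>
+As an illustrative sketch of some of these options
+(the value printed by "<code>count</code>" depends on the program state):
+
+<pre>
+     print(collectgarbage("count"))       -- memory in use, in Kbytes (has a fractional part)
+     collectgarbage("collect")            -- force a full collection cycle
+     collectgarbage("stop")               -- suspend automatic collection
+     print(collectgarbage("isrunning"))   --&gt; false
+     collectgarbage("restart")            -- resume automatic collection
+     print(collectgarbage("isrunning"))   --&gt; true
+</pre>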
+
+
+
+<p>
+<hr><h3><a name="pdf-dofile"><code>dofile ([filename])</code></a></h3>
+Opens the named file and executes its contents as a Lua chunk.
+When called without arguments,
+<code>dofile</code> executes the contents of the standard input (<code>stdin</code>).
+Returns all values returned by the chunk.
+In case of errors, <code>dofile</code> propagates the error
+to its caller (that is, <code>dofile</code> does not run in protected mode).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-error"><code>error (message [, level])</code></a></h3>
+Terminates the last protected function called
+and returns <code>message</code> as the error object.
+Function <code>error</code> never returns.
+
+
+<p>
+Usually, <code>error</code> adds some information about the error position
+at the beginning of the message, if the message is a string.
+The <code>level</code> argument specifies how to get the error position.
+With level&nbsp;1 (the default), the error position is where the
+<code>error</code> function was called.
+Level&nbsp;2 points the error to where the function
+that called <code>error</code> was called; and so on.
+Passing a level&nbsp;0 avoids the addition of error position information
+to the message.
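+
+
+<p>
+As an illustrative sketch of the <code>level</code> argument
+(the function <code>check</code> is an arbitrary example):
+
+<pre>
+     local function check(x)
+       if type(x) ~= "number" then
+         error("number expected", 2)   -- level 2: report the position of check's caller
+       end
+       return x
+     end
+     -- check("oops") would raise an error whose position prefix points
+     -- at the call site of check, not at the error call inside check
+</pre>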
+
+
+
+
+<p>
+<hr><h3><a name="pdf-_G"><code>_G</code></a></h3>
+A global variable (not a function) that
+holds the global environment (see <a href="#2.2">&sect;2.2</a>).
+Lua itself does not use this variable;
+changing its value does not affect any environment,
+nor vice versa.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-getmetatable"><code>getmetatable (object)</code></a></h3>
+
+
+<p>
+If <code>object</code> does not have a metatable, returns <b>nil</b>.
+Otherwise,
+if the object's metatable has a <code>__metatable</code> field,
+returns the associated value.
+Otherwise, returns the metatable of the given object.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-ipairs"><code>ipairs (t)</code></a></h3>
+
+
+<p>
+Returns three values (an iterator function, the table <code>t</code>, and 0)
+so that the construction
+
+<pre>
+ for i,v in ipairs(t) do <em>body</em> end
+</pre><p>
+will iterate over the key&ndash;value pairs
+(<code>1,t[1]</code>), (<code>2,t[2]</code>), ...,
+up to the first nil value.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-load"><code>load (chunk [, chunkname [, mode [, env]]])</code></a></h3>
+
+
+<p>
+Loads a chunk.
+
+
+<p>
+If <code>chunk</code> is a string, the chunk is this string.
+If <code>chunk</code> is a function,
+<code>load</code> calls it repeatedly to get the chunk pieces.
+Each call to <code>chunk</code> must return a string that concatenates
+with previous results.
+A return of an empty string, <b>nil</b>, or no value signals the end of the chunk.
+
+
+<p>
+If there are no syntactic errors,
+returns the compiled chunk as a function;
+otherwise, returns <b>nil</b> plus the error message.
+
+
+<p>
+If the resulting function has upvalues,
+the first upvalue is set to the value of <code>env</code>,
+if that parameter is given,
+or to the value of the global environment.
+Other upvalues are initialized with <b>nil</b>.
+(When you load a main chunk,
+the resulting function will always have exactly one upvalue,
+the <code>_ENV</code> variable (see <a href="#2.2">&sect;2.2</a>).
+However,
+when you load a binary chunk created from a function (see <a href="#pdf-string.dump"><code>string.dump</code></a>),
+the resulting function can have an arbitrary number of upvalues.)
+All upvalues are fresh, that is,
+they are not shared with any other function.
+
+
+<p>
+<code>chunkname</code> is used as the name of the chunk for error messages
+and debug information (see <a href="#4.9">&sect;4.9</a>).
+When absent,
+it defaults to <code>chunk</code>, if <code>chunk</code> is a string,
+or to "<code>=(load)</code>" otherwise.
+
+
+<p>
+The string <code>mode</code> controls whether the chunk can be text or binary
+(that is, a precompiled chunk).
+It may be the string "<code>b</code>" (only binary chunks),
+"<code>t</code>" (only text chunks),
+or "<code>bt</code>" (both binary and text).
+The default is "<code>bt</code>".
+
+
+<p>
+Lua does not check the consistency of binary chunks.
+Maliciously crafted binary chunks can crash
+the interpreter.
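+
+
+<p>
+As an illustrative sketch (the chunks, the chunk name, and the environment
+table here are arbitrary):
+
+<pre>
+     local f = assert(load("return 2 + 3"))
+     print(f())          --&gt; 5
+
+     local env = {x = 42}
+     local g = assert(load("return x", "=demo", "t", env))   -- text-only chunk, custom environment
+     print(g())          --&gt; 42
+</pre>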
+
+
+
+
+<p>
+<hr><h3><a name="pdf-loadfile"><code>loadfile ([filename [, mode [, env]]])</code></a></h3>
+
+
+<p>
+Similar to <a href="#pdf-load"><code>load</code></a>,
+but gets the chunk from file <code>filename</code>
+or from the standard input,
+if no file name is given.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-next"><code>next (table [, index])</code></a></h3>
+
+
+<p>
+Allows a program to traverse all fields of a table.
+Its first argument is a table and its second argument
+is an index in this table.
+<code>next</code> returns the next index of the table
+and its associated value.
+When called with <b>nil</b> as its second argument,
+<code>next</code> returns an initial index
+and its associated value.
+When called with the last index,
+or with <b>nil</b> in an empty table,
+<code>next</code> returns <b>nil</b>.
+If the second argument is absent, then it is interpreted as <b>nil</b>.
+In particular,
+you can use <code>next(t)</code> to check whether a table is empty.
+
+
+<p>
+The order in which the indices are enumerated is not specified,
+<em>even for numeric indices</em>.
+(To traverse a table in numerical order,
+use a numerical <b>for</b>.)
+
+
+<p>
+The behavior of <code>next</code> is undefined if,
+during the traversal,
+you assign any value to a non-existent field in the table.
+You may however modify existing fields.
+In particular, you may clear existing fields.
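+
+
+<p>
+As an illustrative sketch (the table contents are arbitrary):
+
+<pre>
+     local t = {}
+     print(next(t))          --&gt; nil     (the table is empty)
+     t.x = 1
+     print(next(t))          --&gt; x       1
+     print(next(t, "x"))     --&gt; nil     (x was the last index)
+</pre>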
+
+
+
+
+<p>
+<hr><h3><a name="pdf-pairs"><code>pairs (t)</code></a></h3>
+
+
+<p>
+If <code>t</code> has a metamethod <code>__pairs</code>,
+calls it with <code>t</code> as argument and returns the first three
+results from the call.
+
+
+<p>
+Otherwise,
+returns three values: the <a href="#pdf-next"><code>next</code></a> function, the table <code>t</code>, and <b>nil</b>,
+so that the construction
+
+<pre>
+ for k,v in pairs(t) do <em>body</em> end
+</pre><p>
+will iterate over all key&ndash;value pairs of table <code>t</code>.
+
+
+<p>
+See function <a href="#pdf-next"><code>next</code></a> for the caveats of modifying
+the table during its traversal.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-pcall"><code>pcall (f [, arg1, &middot;&middot;&middot;])</code></a></h3>
+
+
+<p>
+Calls function <code>f</code> with
+the given arguments in <em>protected mode</em>.
+This means that any error inside&nbsp;<code>f</code> is not propagated;
+instead, <code>pcall</code> catches the error
+and returns a status code.
+Its first result is the status code (a boolean),
+which is true if the call succeeds without errors.
+In such case, <code>pcall</code> also returns all results from the call,
+after this first result.
+In case of any error, <code>pcall</code> returns <b>false</b> plus the error message.
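+
+
+<p>
+As an illustrative sketch (the exact position prefix in the error message
+depends on the chunk and line where the error is raised):
+
+<pre>
+     print(pcall(tonumber, "42"))    --&gt; true    42
+
+     local ok, err = pcall(function () error("boom") end)
+     print(ok)      --&gt; false
+     print(err)     -- the message "boom", preceded by position information
+</pre>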
+
+
+
+
+<p>
+<hr><h3><a name="pdf-print"><code>print (&middot;&middot;&middot;)</code></a></h3>
+Receives any number of arguments
+and prints their values to <code>stdout</code>,
+using the <a href="#pdf-tostring"><code>tostring</code></a> function to convert each argument to a string.
+<code>print</code> is not intended for formatted output,
+but only as a quick way to show a value,
+for instance for debugging.
+For complete control over the output,
+use <a href="#pdf-string.format"><code>string.format</code></a> and <a href="#pdf-io.write"><code>io.write</code></a>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-rawequal"><code>rawequal (v1, v2)</code></a></h3>
+Checks whether <code>v1</code> is equal to <code>v2</code>,
+without invoking the <code>__eq</code> metamethod.
+Returns a boolean.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-rawget"><code>rawget (table, index)</code></a></h3>
+Gets the real value of <code>table[index]</code>,
+without invoking the <code>__index</code> metamethod.
+<code>table</code> must be a table;
+<code>index</code> may be any value.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-rawlen"><code>rawlen (v)</code></a></h3>
+Returns the length of the object <code>v</code>,
+which must be a table or a string,
+without invoking the <code>__len</code> metamethod.
+Returns an integer.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-rawset"><code>rawset (table, index, value)</code></a></h3>
+Sets the real value of <code>table[index]</code> to <code>value</code>,
+without invoking the <code>__newindex</code> metamethod.
+<code>table</code> must be a table,
+<code>index</code> any value different from <b>nil</b> and NaN,
+and <code>value</code> any Lua value.
+
+
+<p>
+This function returns <code>table</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-select"><code>select (index, &middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+If <code>index</code> is a number,
+returns all arguments after argument number <code>index</code>;
+a negative number indexes from the end (-1 is the last argument).
+Otherwise, <code>index</code> must be the string <code>"#"</code>,
+and <code>select</code> returns the total number of extra arguments it received.
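+
+
+<p>
+As an illustrative sketch (the arguments are arbitrary):
+
+<pre>
+     print(select("#", "a", nil, "c"))    --&gt; 3
+     print(select(2, "a", "b", "c"))      --&gt; b       c
+     print(select(-1, "a", "b", "c"))     --&gt; c
+</pre>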
+
+
+
+
+<p>
+<hr><h3><a name="pdf-setmetatable"><code>setmetatable (table, metatable)</code></a></h3>
+
+
+<p>
+Sets the metatable for the given table.
+(To change the metatable of other types from Lua code,
+you must use the debug library (<a href="#6.10">&sect;6.10</a>).)
+If <code>metatable</code> is <b>nil</b>,
+removes the metatable of the given table.
+If the original metatable has a <code>__metatable</code> field,
+raises an error.
+
+
+<p>
+This function returns <code>table</code>.
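+
+
+<p>
+As an illustrative sketch of a common use,
+an <code>__index</code> metamethod providing default values
+(the tables here are arbitrary):
+
+<pre>
+     local defaults = {color = "red"}
+     local point = setmetatable({x = 1}, {__index = defaults})
+     print(point.x, point.color)    --&gt; 1       red
+</pre>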
+
+
+
+
+<p>
+<hr><h3><a name="pdf-tonumber"><code>tonumber (e [, base])</code></a></h3>
+
+
+<p>
+When called with no <code>base</code>,
+<code>tonumber</code> tries to convert its argument to a number.
+If the argument is already a number or
+a string convertible to a number,
+then <code>tonumber</code> returns this number;
+otherwise, it returns <b>nil</b>.
+
+
+<p>
+The conversion of strings can result in integers or floats,
+according to the lexical conventions of Lua (see <a href="#3.1">&sect;3.1</a>).
+(The string may have leading and trailing spaces and a sign.)
+
+
+<p>
+When called with <code>base</code>,
+then <code>e</code> must be a string to be interpreted as
+an integer numeral in that base.
+The base may be any integer between 2 and 36, inclusive.
+In bases above&nbsp;10, the letter '<code>A</code>' (in either upper or lower case)
+represents&nbsp;10, '<code>B</code>' represents&nbsp;11, and so forth,
+with '<code>Z</code>' representing 35.
+If the string <code>e</code> is not a valid numeral in the given base,
+the function returns <b>nil</b>.
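+
+
+<p>
+As an illustrative sketch (the arguments are arbitrary):
+
+<pre>
+     print(tonumber("  42  "))     --&gt; 42
+     print(tonumber("0x1A"))       --&gt; 26
+     print(tonumber("3.5e2"))      --&gt; 350.0
+     print(tonumber("ff", 16))     --&gt; 255
+     print(tonumber("z", 36))      --&gt; 35
+     print(tonumber("abc"))        --&gt; nil
+</pre>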
+
+
+
+
+<p>
+<hr><h3><a name="pdf-tostring"><code>tostring (v)</code></a></h3>
+Receives a value of any type and
+converts it to a string in a human-readable format.
+(For complete control of how numbers are converted,
+use <a href="#pdf-string.format"><code>string.format</code></a>.)
+
+
+<p>
+If the metatable of <code>v</code> has a <code>__tostring</code> field,
+then <code>tostring</code> calls the corresponding value
+with <code>v</code> as argument,
+and uses the result of the call as its result.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-type"><code>type (v)</code></a></h3>
+Returns the type of its only argument, coded as a string.
+The possible results of this function are
+"<code>nil</code>" (a string, not the value <b>nil</b>),
+"<code>number</code>",
+"<code>string</code>",
+"<code>boolean</code>",
+"<code>table</code>",
+"<code>function</code>",
+"<code>thread</code>",
+and "<code>userdata</code>".
+
+
+
+
+<p>
+<hr><h3><a name="pdf-_VERSION"><code>_VERSION</code></a></h3>
+
+
+<p>
+A global variable (not a function) that
+holds a string containing the running Lua version.
+The current value of this variable is "<code>Lua 5.3</code>".
+
+
+
+
+<p>
+<hr><h3><a name="pdf-xpcall"><code>xpcall (f, msgh [, arg1, &middot;&middot;&middot;])</code></a></h3>
+
+
+<p>
+This function is similar to <a href="#pdf-pcall"><code>pcall</code></a>,
+except that it sets a new message handler <code>msgh</code>.
+
+
+
+
+
+
+
+<h2>6.2 &ndash; <a name="6.2">Coroutine Manipulation</a></h2>
+
+<p>
+This library comprises the operations to manipulate coroutines,
+which come inside the table <a name="pdf-coroutine"><code>coroutine</code></a>.
+See <a href="#2.6">&sect;2.6</a> for a general description of coroutines.
+
+
+<p>
+<hr><h3><a name="pdf-coroutine.create"><code>coroutine.create (f)</code></a></h3>
+
+
+<p>
+Creates a new coroutine, with body <code>f</code>.
+<code>f</code> must be a function.
+Returns this new coroutine,
+an object with type <code>"thread"</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-coroutine.isyieldable"><code>coroutine.isyieldable ()</code></a></h3>
+
+
+<p>
+Returns true when the running coroutine can yield.
+
+
+<p>
+A running coroutine is yieldable if it is not the main thread and
+it is not inside a non-yieldable C&nbsp;function.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-coroutine.resume"><code>coroutine.resume (co [, val1, &middot;&middot;&middot;])</code></a></h3>
+
+
+<p>
+Starts or continues the execution of coroutine <code>co</code>.
+The first time you resume a coroutine,
+it starts running its body.
+The values <code>val1</code>, ... are passed
+as the arguments to the body function.
+If the coroutine has yielded,
+<code>resume</code> restarts it;
+the values <code>val1</code>, ... are passed
+as the results from the yield.
+
+
+<p>
+If the coroutine runs without any errors,
+<code>resume</code> returns <b>true</b> plus any values passed to <code>yield</code>
+(when the coroutine yields) or any values returned by the body function
+(when the coroutine terminates).
+If there is any error,
+<code>resume</code> returns <b>false</b> plus the error message.
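+
+
+<p>
+As an illustrative sketch of how values flow between
+<code>resume</code> and <code>yield</code>
+(the body function is an arbitrary example):
+
+<pre>
+     local co = coroutine.create(function (a, b)
+       local c = coroutine.yield(a + b)   -- a+b goes to the first resume; c comes from the second
+       return c * 10                      -- returned to the second resume
+     end)
+     print(coroutine.resume(co, 1, 2))    --&gt; true    3
+     print(coroutine.resume(co, 4))       --&gt; true    40
+     print(coroutine.status(co))          --&gt; dead
+</pre>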
+
+
+
+
+<p>
+<hr><h3><a name="pdf-coroutine.running"><code>coroutine.running ()</code></a></h3>
+
+
+<p>
+Returns the running coroutine plus a boolean,
+true when the running coroutine is the main one.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-coroutine.status"><code>coroutine.status (co)</code></a></h3>
+
+
+<p>
+Returns the status of coroutine <code>co</code>, as a string:
+<code>"running"</code>,
+if the coroutine is running (that is, it called <code>status</code>);
+<code>"suspended"</code>, if the coroutine is suspended in a call to <code>yield</code>,
+or if it has not started running yet;
+<code>"normal"</code> if the coroutine is active but not running
+(that is, it has resumed another coroutine);
+and <code>"dead"</code> if the coroutine has finished its body function,
+or if it has stopped with an error.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-coroutine.wrap"><code>coroutine.wrap (f)</code></a></h3>
+
+
+<p>
+Creates a new coroutine, with body <code>f</code>.
+<code>f</code> must be a function.
+Returns a function that resumes the coroutine each time it is called.
+Any arguments passed to the function behave as the
+extra arguments to <code>resume</code>.
+Returns the same values returned by <code>resume</code>,
+except the first boolean.
+In case of error, propagates the error.
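+
+
+<p>
+As an illustrative sketch (the body function is an arbitrary example):
+
+<pre>
+     local gen = coroutine.wrap(function ()
+       for i = 1, 3 do coroutine.yield(i * i) end
+     end)
+     print(gen())    --&gt; 1
+     print(gen())    --&gt; 4
+     print(gen())    --&gt; 9
+</pre>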
+
+
+
+
+<p>
+<hr><h3><a name="pdf-coroutine.yield"><code>coroutine.yield (&middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Suspends the execution of the calling coroutine.
+Any arguments to <code>yield</code> are passed as extra results to <code>resume</code>.
+
+
+
+
+
+
+
+<h2>6.3 &ndash; <a name="6.3">Modules</a></h2>
+
+<p>
+The package library provides basic
+facilities for loading modules in Lua.
+It exports one function directly in the global environment:
+<a href="#pdf-require"><code>require</code></a>.
+Everything else is exported in a table <a name="pdf-package"><code>package</code></a>.
+
+
+<p>
+<hr><h3><a name="pdf-require"><code>require (modname)</code></a></h3>
+
+
+<p>
+Loads the given module.
+The function starts by looking into the <a href="#pdf-package.loaded"><code>package.loaded</code></a> table
+to determine whether <code>modname</code> is already loaded.
+If it is, then <code>require</code> returns the value stored
+at <code>package.loaded[modname]</code>.
+Otherwise, it tries to find a <em>loader</em> for the module.
+
+
+<p>
+To find a loader,
+<code>require</code> is guided by the <a href="#pdf-package.searchers"><code>package.searchers</code></a> sequence.
+By changing this sequence,
+we can change how <code>require</code> looks for a module.
+The following explanation is based on the default configuration
+for <a href="#pdf-package.searchers"><code>package.searchers</code></a>.
+
+
+<p>
+First <code>require</code> queries <code>package.preload[modname]</code>.
+If it has a value,
+this value (which must be a function) is the loader.
+Otherwise <code>require</code> searches for a Lua loader using the
+path stored in <a href="#pdf-package.path"><code>package.path</code></a>.
+If that also fails, it searches for a C&nbsp;loader using the
+path stored in <a href="#pdf-package.cpath"><code>package.cpath</code></a>.
+If that also fails,
+it tries an <em>all-in-one</em> loader (see <a href="#pdf-package.searchers"><code>package.searchers</code></a>).
+
+
+<p>
+Once a loader is found,
+<code>require</code> calls the loader with two arguments:
+<code>modname</code> and an extra value dependent on how it got the loader.
+(If the loader came from a file,
+this extra value is the file name.)
+If the loader returns any non-nil value,
+<code>require</code> assigns the returned value to <code>package.loaded[modname]</code>.
+If the loader does not return a non-nil value and
+has not assigned any value to <code>package.loaded[modname]</code>,
+then <code>require</code> assigns <b>true</b> to this entry.
+In any case, <code>require</code> returns the
+final value of <code>package.loaded[modname]</code>.
+
+
+<p>
+If there is any error loading or running the module,
+or if it cannot find any loader for the module,
+then <code>require</code> raises an error.
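+
+
+<p>
+As an illustrative sketch that avoids any dependence on the file system,
+the following fragment registers a loader in <code>package.preload</code>
+(the module name "<code>demo</code>" is arbitrary):
+
+<pre>
+     package.preload["demo"] = function (modname)
+       return {greeting = "hello from " .. modname}
+     end
+     local demo = require("demo")
+     print(demo.greeting)              --&gt; hello from demo
+     print(require("demo") == demo)    --&gt; true    (cached in package.loaded)
+</pre>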
+
+
+
+
+<p>
+<hr><h3><a name="pdf-package.config"><code>package.config</code></a></h3>
+
+
+<p>
+A string describing some compile-time configurations for packages.
+This string is a sequence of lines:
+
+<ul>
+
+<li>The first line is the directory separator string.
+Default is '<code>\</code>' for Windows and '<code>/</code>' for all other systems.</li>
+
+<li>The second line is the character that separates templates in a path.
+Default is '<code>;</code>'.</li>
+
+<li>The third line is the string that marks the
+substitution points in a template.
+Default is '<code>?</code>'.</li>
+
+<li>The fourth line is a string that, in a path in Windows,
+is replaced by the executable's directory.
+Default is '<code>!</code>'.</li>
+
+<li>The fifth line is a mark to ignore all text after it
+when building the <code>luaopen_</code> function name.
+Default is '<code>-</code>'.</li>
+
+</ul>
+
+
+
+<p>
+<hr><h3><a name="pdf-package.cpath"><code>package.cpath</code></a></h3>
+
+
+<p>
+The path used by <a href="#pdf-require"><code>require</code></a> to search for a C&nbsp;loader.
+
+
+<p>
+Lua initializes the C&nbsp;path <a href="#pdf-package.cpath"><code>package.cpath</code></a> in the same way
+it initializes the Lua path <a href="#pdf-package.path"><code>package.path</code></a>,
+using the environment variable <a name="pdf-LUA_CPATH_5_3"><code>LUA_CPATH_5_3</code></a>,
+or the environment variable <a name="pdf-LUA_CPATH"><code>LUA_CPATH</code></a>,
+or a default path defined in <code>luaconf.h</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-package.loaded"><code>package.loaded</code></a></h3>
+
+
+<p>
+A table used by <a href="#pdf-require"><code>require</code></a> to control which
+modules are already loaded.
+When you require a module <code>modname</code> and
+<code>package.loaded[modname]</code> is not false,
+<a href="#pdf-require"><code>require</code></a> simply returns the value stored there.
+
+
+<p>
+This variable is only a reference to the real table;
+assignments to this variable do not change the
+table used by <a href="#pdf-require"><code>require</code></a>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-package.loadlib"><code>package.loadlib (libname, funcname)</code></a></h3>
+
+
+<p>
+Dynamically links the host program with the C&nbsp;library <code>libname</code>.
+
+
+<p>
+If <code>funcname</code> is "<code>*</code>",
+then it only links with the library,
+making the symbols exported by the library
+available to other dynamically linked libraries.
+Otherwise,
+it looks for a function <code>funcname</code> inside the library
+and returns this function as a C&nbsp;function.
+So, <code>funcname</code> must follow the <a href="#lua_CFunction"><code>lua_CFunction</code></a> prototype
+(see <a href="#lua_CFunction"><code>lua_CFunction</code></a>).
+
+
+<p>
+This is a low-level function.
+It completely bypasses the package and module system.
+Unlike <a href="#pdf-require"><code>require</code></a>,
+it does not perform any path searching and
+does not automatically add extensions.
+<code>libname</code> must be the complete file name of the C&nbsp;library,
+including if necessary a path and an extension.
+<code>funcname</code> must be the exact name exported by the C&nbsp;library
+(which may depend on the C&nbsp;compiler and linker used).
+
+
+<p>
+This function is not supported by Standard&nbsp;C.
+As such, it is only available on some platforms
+(Windows, Linux, Mac OS X, Solaris, BSD,
+plus other Unix systems that support the <code>dlfcn</code> standard).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-package.path"><code>package.path</code></a></h3>
+
+
+<p>
+The path used by <a href="#pdf-require"><code>require</code></a> to search for a Lua loader.
+
+
+<p>
+At start-up, Lua initializes this variable with
+the value of the environment variable <a name="pdf-LUA_PATH_5_3"><code>LUA_PATH_5_3</code></a> or
+the environment variable <a name="pdf-LUA_PATH"><code>LUA_PATH</code></a> or
+with a default path defined in <code>luaconf.h</code>,
+if those environment variables are not defined.
+Any "<code>;;</code>" in the value of the environment variable
+is replaced by the default path.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-package.preload"><code>package.preload</code></a></h3>
+
+
+<p>
+A table to store loaders for specific modules
+(see <a href="#pdf-require"><code>require</code></a>).
+
+
+<p>
+This variable is only a reference to the real table;
+assignments to this variable do not change the
+table used by <a href="#pdf-require"><code>require</code></a>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-package.searchers"><code>package.searchers</code></a></h3>
+
+
+<p>
+A table used by <a href="#pdf-require"><code>require</code></a> to control how to load modules.
+
+
+<p>
+Each entry in this table is a <em>searcher function</em>.
+When looking for a module,
+<a href="#pdf-require"><code>require</code></a> calls each of these searchers in ascending order,
+with the module name (the argument given to <a href="#pdf-require"><code>require</code></a>) as its
+sole parameter.
+The function can return another function (the module <em>loader</em>)
+plus an extra value that will be passed to that loader,
+or a string explaining why it did not find that module
+(or <b>nil</b> if it has nothing to say).
+
+
+<p>
+Lua initializes this table with four searcher functions.
+
+
+<p>
+The first searcher simply looks for a loader in the
+<a href="#pdf-package.preload"><code>package.preload</code></a> table.
+
+
+<p>
+The second searcher looks for a loader as a Lua library,
+using the path stored at <a href="#pdf-package.path"><code>package.path</code></a>.
+The search is done as described in function <a href="#pdf-package.searchpath"><code>package.searchpath</code></a>.
+
+
+<p>
+The third searcher looks for a loader as a C&nbsp;library,
+using the path given by the variable <a href="#pdf-package.cpath"><code>package.cpath</code></a>.
+Again,
+the search is done as described in function <a href="#pdf-package.searchpath"><code>package.searchpath</code></a>.
+For instance,
+if the C&nbsp;path is the string
+
+<pre>
+ "./?.so;./?.dll;/usr/local/?/init.so"
+</pre><p>
+the searcher for module <code>foo</code>
+will try to open the files <code>./foo.so</code>, <code>./foo.dll</code>,
+and <code>/usr/local/foo/init.so</code>, in that order.
+Once it finds a C&nbsp;library,
+this searcher first uses a dynamic link facility to link the
+application with the library.
+Then it tries to find a C&nbsp;function inside the library to
+be used as the loader.
+The name of this C&nbsp;function is the string "<code>luaopen_</code>"
+concatenated with a copy of the module name where each dot
+is replaced by an underscore.
+Moreover, if the module name has a hyphen,
+its suffix after (and including) the first hyphen is removed.
+For instance, if the module name is <code>a.b.c-v2.1</code>,
+the function name will be <code>luaopen_a_b_c</code>.
+
+
+<p>
+The fourth searcher tries an <em>all-in-one loader</em>.
+It searches the C&nbsp;path for a library for
+the root name of the given module.
+For instance, when requiring <code>a.b.c</code>,
+it will search for a C&nbsp;library for <code>a</code>.
+If found, it looks into it for an open function for
+the submodule;
+in our example, that would be <code>luaopen_a_b_c</code>.
+With this facility, a package can pack several C&nbsp;submodules
+into one single library,
+with each submodule keeping its original open function.
+
+
+<p>
+All searchers except the first one (preload) return as the extra value
+the file name where the module was found,
+as returned by <a href="#pdf-package.searchpath"><code>package.searchpath</code></a>.
+The first searcher returns no extra value.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-package.searchpath"><code>package.searchpath (name, path [, sep [, rep]])</code></a></h3>
+
+
+<p>
+Searches for the given <code>name</code> in the given <code>path</code>.
+
+
+<p>
+A path is a string containing a sequence of
+<em>templates</em> separated by semicolons.
+For each template,
+the function replaces each interrogation mark (if any)
+in the template with a copy of <code>name</code>
+wherein all occurrences of <code>sep</code>
+(a dot, by default)
+were replaced by <code>rep</code>
+(the system's directory separator, by default),
+and then tries to open the resulting file name.
+
+
+<p>
+For instance, if the path is the string
+
+<pre>
+ "./?.lua;./?.lc;/usr/local/?/init.lua"
+</pre><p>
+the search for the name <code>foo.a</code>
+will try to open the files
+<code>./foo/a.lua</code>, <code>./foo/a.lc</code>, and
+<code>/usr/local/foo/a/init.lua</code>, in that order.
+
+
+<p>
+Returns the resulting name of the first file that it can
+open in read mode (after closing the file),
+or <b>nil</b> plus an error message if none succeeds.
+(This error message lists all file names it tried to open.)
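+
+
+<p>
+As an illustrative sketch (the path and the module name are arbitrary,
+and the result depends on which files actually exist):
+
+<pre>
+     local path = "./?.lua;./lib/?.lua"
+     local file, err = package.searchpath("foo.bar", path)
+     -- on a system whose directory separator is '/', the templates expand to
+     -- "./foo/bar.lua" and "./lib/foo/bar.lua"; file is the first one that can
+     -- be opened for reading, or nil (with err listing all tried names) if none can
+     print(file or err)
+</pre>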
+
+
+
+
+
+
+
+<h2>6.4 &ndash; <a name="6.4">String Manipulation</a></h2>
+
+<p>
+This library provides generic functions for string manipulation,
+such as finding and extracting substrings, and pattern matching.
+When indexing a string in Lua, the first character is at position&nbsp;1
+(not at&nbsp;0, as in C).
+Indices are allowed to be negative and are interpreted as indexing backwards,
+from the end of the string.
+Thus, the last character is at position -1, and so on.
+
+
+<p>
+The string library provides all its functions inside the table
+<a name="pdf-string"><code>string</code></a>.
+It also sets a metatable for strings
+where the <code>__index</code> field points to the <code>string</code> table.
+Therefore, you can use the string functions in object-oriented style.
+For instance, <code>string.byte(s,i)</code>
+can be written as <code>s:byte(i)</code>.
+
+
+<p>
+The string library assumes one-byte character encodings.
+
+
+<p>
+<hr><h3><a name="pdf-string.byte"><code>string.byte (s [, i [, j]])</code></a></h3>
+Returns the internal numeric codes of the characters <code>s[i]</code>,
+<code>s[i+1]</code>, ..., <code>s[j]</code>.
+The default value for <code>i</code> is&nbsp;1;
+the default value for <code>j</code> is&nbsp;<code>i</code>.
+These indices are corrected
+following the same rules of function <a href="#pdf-string.sub"><code>string.sub</code></a>.
+
+
+<p>
+Numeric codes are not necessarily portable across platforms.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.char"><code>string.char (&middot;&middot;&middot;)</code></a></h3>
+Receives zero or more integers.
+Returns a string with length equal to the number of arguments,
+in which each character has the internal numeric code equal
+to its corresponding argument.
+
+
+<p>
+Numeric codes are not necessarily portable across platforms.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.dump"><code>string.dump (function [, strip])</code></a></h3>
+
+
+<p>
+Returns a string containing a binary representation
+(a <em>binary chunk</em>)
+of the given function,
+so that a later <a href="#pdf-load"><code>load</code></a> on this string returns
+a copy of the function (but with new upvalues).
+If <code>strip</code> is a true value,
+the binary representation may not include all debug information
+about the function,
+to save space.
+
+
+<p>
+Functions with upvalues have only their number of upvalues saved.
+When (re)loaded,
+those upvalues receive fresh instances containing <b>nil</b>.
+(You can use the debug library to serialize
+and reload the upvalues of a function
+in a way adequate to your needs.)
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.find"><code>string.find (s, pattern [, init [, plain]])</code></a></h3>
+
+
+<p>
+Looks for the first match of
+<code>pattern</code> (see <a href="#6.4.1">&sect;6.4.1</a>) in the string <code>s</code>.
+If it finds a match, then <code>find</code> returns the indices of&nbsp;<code>s</code>
+where this occurrence starts and ends;
+otherwise, it returns <b>nil</b>.
+A third, optional numeric argument <code>init</code> specifies
+where to start the search;
+its default value is&nbsp;1, and it can be negative.
+A value of <b>true</b> as a fourth, optional argument <code>plain</code>
+turns off the pattern matching facilities,
+so the function does a plain "find substring" operation,
+with no characters in <code>pattern</code> being considered magic.
+Note that if <code>plain</code> is given, then <code>init</code> must be given as well.
+
+
+<p>
+If the pattern has captures,
+then in a successful match
+the captured values are also returned,
+after the two indices.
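+
+
+<p>
+As an illustrative sketch (the subject strings and patterns are arbitrary):
+
+<pre>
+     print(string.find("hello Lua user", "Lua"))    --&gt; 7       9
+     print(string.find("hello", "l+"))              --&gt; 3       4
+     print(string.find("hello", "(l+)(o)"))         --&gt; 3       5       ll      o
+     print(string.find("a.b.c", ".", 1, true))      --&gt; 2       2       (plain search)
+</pre>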
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.format"><code>string.format (formatstring, &middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Returns a formatted version of its variable number of arguments
+following the description given in its first argument (which must be a string).
+The format string follows the same rules as the ISO&nbsp;C function <code>sprintf</code>.
+The only differences are that the options/modifiers
+<code>*</code>, <code>h</code>, <code>L</code>, <code>l</code>, <code>n</code>,
+and <code>p</code> are not supported
+and that there is an extra option, <code>q</code>.
+
+
+<p>
+The <code>q</code> option formats a string between double quotes,
+using escape sequences when necessary to ensure that
+it can safely be read back by the Lua interpreter.
+For instance, the call
+
+<pre>
+ string.format('%q', 'a string with "quotes" and \n new line')
+</pre><p>
+may produce the string:
+
+<pre>
+ "a string with \"quotes\" and \
+ new line"
+</pre>
+
+<p>
+Options
+<code>A</code>, <code>a</code>, <code>E</code>, <code>e</code>, <code>f</code>,
+<code>G</code>, and <code>g</code> all expect a number as argument.
+Options <code>c</code>, <code>d</code>,
+<code>i</code>, <code>o</code>, <code>u</code>, <code>X</code>, and <code>x</code>
+expect an integer.
+When Lua is compiled with a C89 compiler,
+options <code>A</code> and <code>a</code> (hexadecimal floats)
+do not support any modifier (flags, width, length).
+
+
+<p>
+Option <code>s</code> expects a string;
+if its argument is not a string,
+it is converted to one following the same rules of <a href="#pdf-tostring"><code>tostring</code></a>.
+If the option has any modifier (flags, width, length),
+the string argument should not contain embedded zeros.
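+
+
+<p>
+As an illustrative sketch (the formats and arguments are arbitrary):
+
+<pre>
+     print(string.format("[%5d]", 42))          --&gt; [   42]
+     print(string.format("%08.3f", 3.14159))    --&gt; 0003.142
+     print(string.format("%x", 255))            --&gt; ff
+     print(string.format("%q", 'a "quoted" string'))    --&gt; "a \"quoted\" string"
+</pre>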
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.gmatch"><code>string.gmatch (s, pattern)</code></a></h3>
+Returns an iterator function that,
+each time it is called,
+returns the next captures from <code>pattern</code> (see <a href="#6.4.1">&sect;6.4.1</a>)
+over the string <code>s</code>.
+If <code>pattern</code> specifies no captures,
+then the whole match is produced in each call.
+
+
+<p>
+As an example, the following loop
+will iterate over all the words from string <code>s</code>,
+printing one per line:
+
+<pre>
+ s = "hello world from Lua"
+ for w in string.gmatch(s, "%a+") do
+ print(w)
+ end
+</pre><p>
+The next example collects all pairs <code>key=value</code> from the
+given string into a table:
+
+<pre>
+     t = {}
+     s = "from=world, to=Lua"
+     for k, v in string.gmatch(s, "(%w+)=(%w+)") do
+       t[k] = v
+     end
+</pre>
+
+<p>
+For this function, a caret '<code>^</code>' at the start of a pattern does not
+work as an anchor, as this would prevent the iteration.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.gsub"><code>string.gsub (s, pattern, repl [, n])</code></a></h3>
+Returns a copy of <code>s</code>
+in which all (or the first <code>n</code>, if given)
+occurrences of the <code>pattern</code> (see <a href="#6.4.1">&sect;6.4.1</a>) have been
+replaced by a replacement string specified by <code>repl</code>,
+which can be a string, a table, or a function.
+<code>gsub</code> also returns, as its second value,
+the total number of matches that occurred.
+The name <code>gsub</code> comes from <em>Global SUBstitution</em>.
+
+
+<p>
+If <code>repl</code> is a string, then its value is used for replacement.
+The character&nbsp;<code>%</code> works as an escape character:
+any sequence in <code>repl</code> of the form <code>%<em>d</em></code>,
+with <em>d</em> between 1 and 9,
+stands for the value of the <em>d</em>-th captured substring.
+The sequence <code>%0</code> stands for the whole match.
+The sequence <code>%%</code> stands for a single&nbsp;<code>%</code>.
+
+
+<p>
+If <code>repl</code> is a table, then the table is queried for every match,
+using the first capture as the key.
+
+
+<p>
+If <code>repl</code> is a function, then this function is called every time a
+match occurs, with all captured substrings passed as arguments,
+in order.
+
+
+<p>
+In any case,
+if the pattern specifies no captures,
+then it behaves as if the whole pattern was inside a capture.
+
+
+<p>
+If the value returned by the table query or by the function call
+is a string or a number,
+then it is used as the replacement string;
+otherwise, if it is <b>false</b> or <b>nil</b>,
+then there is no replacement
+(that is, the original match is kept in the string).
+
+
+<p>
+Here are some examples:
+
+<pre>
+ x = string.gsub("hello world", "(%w+)", "%1 %1")
+ --&gt; x="hello hello world world"
+
+ x = string.gsub("hello world", "%w+", "%0 %0", 1)
+ --&gt; x="hello hello world"
+
+ x = string.gsub("hello world from Lua", "(%w+)%s*(%w+)", "%2 %1")
+ --&gt; x="world hello Lua from"
+
+ x = string.gsub("home = $HOME, user = $USER", "%$(%w+)", os.getenv)
+ --&gt; x="home = /home/roberto, user = roberto"
+
+ x = string.gsub("4+5 = $return 4+5$", "%$(.-)%$", function (s)
+ return load(s)()
+ end)
+ --&gt; x="4+5 = 9"
+
+ local t = {name="lua", version="5.3"}
+ x = string.gsub("$name-$version.tar.gz", "%$(%w+)", t)
+ --&gt; x="lua-5.3.tar.gz"
+</pre>
+
+
+
+<p>
+<hr><h3><a name="pdf-string.len"><code>string.len (s)</code></a></h3>
+Receives a string and returns its length.
+The empty string <code>""</code> has length 0.
+Embedded zeros are counted,
+so <code>"a\000bc\000"</code> has length 5.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.lower"><code>string.lower (s)</code></a></h3>
+Receives a string and returns a copy of this string with all
+uppercase letters changed to lowercase.
+All other characters are left unchanged.
+The definition of what an uppercase letter is depends on the current locale.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.match"><code>string.match (s, pattern [, init])</code></a></h3>
+Looks for the first <em>match</em> of
+<code>pattern</code> (see <a href="#6.4.1">&sect;6.4.1</a>) in the string <code>s</code>.
+If it finds one, then <code>match</code> returns
+the captures from the pattern;
+otherwise it returns <b>nil</b>.
+If <code>pattern</code> specifies no captures,
+then the whole match is returned.
+A third, optional numeric argument <code>init</code> specifies
+where to start the search;
+its default value is&nbsp;1 and can be negative.
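+
+
+<p>
+For instance, the following sketch extracts three captures
+from a date-like string:
+
+<pre>
+     d, m, y = string.match("17/05/2024", "(%d+)/(%d+)/(%d+)")
+     --&gt; d="17", m="05", y="2024"
+</pre>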
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.pack"><code>string.pack (fmt, v1, v2, &middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Returns a binary string containing the values <code>v1</code>, <code>v2</code>, etc.
+packed (that is, serialized in binary form)
+according to the format string <code>fmt</code> (see <a href="#6.4.2">&sect;6.4.2</a>).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.packsize"><code>string.packsize (fmt)</code></a></h3>
+
+
+<p>
+Returns the size of a string resulting from <a href="#pdf-string.pack"><code>string.pack</code></a>
+with the given format.
+The format string cannot have the variable-length options
+'<code>s</code>' or '<code>z</code>' (see <a href="#6.4.2">&sect;6.4.2</a>).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.rep"><code>string.rep (s, n [, sep])</code></a></h3>
+Returns a string that is the concatenation of <code>n</code> copies of
+the string <code>s</code> separated by the string <code>sep</code>.
+The default value for <code>sep</code> is the empty string
+(that is, no separator).
+Returns the empty string if <code>n</code> is not positive.
+
+
+<p>
+(Note that it is very easy to exhaust the memory of your machine
+with a single call to this function.)
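+
+
+<p>
+For instance:
+
+<pre>
+     string.rep("ab", 3)         --&gt; "ababab"
+     string.rep("ab", 3, "-")    --&gt; "ab-ab-ab"
+     string.rep("ab", 0)         --&gt; ""
+</pre>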
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.reverse"><code>string.reverse (s)</code></a></h3>
+Returns a string that is the string <code>s</code> reversed.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.sub"><code>string.sub (s, i [, j])</code></a></h3>
+Returns the substring of <code>s</code> that
+starts at <code>i</code> and continues until <code>j</code>;
+<code>i</code> and <code>j</code> can be negative.
+If <code>j</code> is absent, then it is assumed to be equal to -1
+(which is the same as the string length).
+In particular,
+the call <code>string.sub(s,1,j)</code> returns a prefix of <code>s</code>
+with length <code>j</code>,
+and <code>string.sub(s, -i)</code> (for a positive <code>i</code>)
+returns a suffix of <code>s</code>
+with length <code>i</code>.
+
+
+<p>
+If, after the translation of negative indices,
+<code>i</code> is less than 1,
+it is corrected to 1.
+If <code>j</code> is greater than the string length,
+it is corrected to that length.
+If, after these corrections,
+<code>i</code> is greater than <code>j</code>,
+the function returns the empty string.
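+
+
+<p>
+Some illustrative calls:
+
+<pre>
+     string.sub("hello Lua", 1, 5)      --&gt; "hello"
+     string.sub("hello Lua", -3)        --&gt; "Lua"
+     string.sub("hello Lua", 7, 100)    --&gt; "Lua"
+     string.sub("hello Lua", 5, 2)      --&gt; ""
+</pre>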
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.unpack"><code>string.unpack (fmt, s [, pos])</code></a></h3>
+
+
+<p>
+Returns the values packed in string <code>s</code> (see <a href="#pdf-string.pack"><code>string.pack</code></a>)
+according to the format string <code>fmt</code> (see <a href="#6.4.2">&sect;6.4.2</a>).
+An optional <code>pos</code> marks where
+to start reading in <code>s</code> (default is 1).
+After the read values,
+this function also returns the index of the first unread byte in <code>s</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-string.upper"><code>string.upper (s)</code></a></h3>
+Receives a string and returns a copy of this string with all
+lowercase letters changed to uppercase.
+All other characters are left unchanged.
+The definition of what a lowercase letter is depends on the current locale.
+
+
+
+
+
+<h3>6.4.1 &ndash; <a name="6.4.1">Patterns</a></h3>
+
+<p>
+Patterns in Lua are described by regular strings,
+which are interpreted as patterns by the pattern-matching functions
+<a href="#pdf-string.find"><code>string.find</code></a>,
+<a href="#pdf-string.gmatch"><code>string.gmatch</code></a>,
+<a href="#pdf-string.gsub"><code>string.gsub</code></a>,
+and <a href="#pdf-string.match"><code>string.match</code></a>.
+This section describes the syntax and the meaning
+(that is, what they match) of these strings.
+
+
+
+<h4>Character Class:</h4><p>
+A <em>character class</em> is used to represent a set of characters.
+The following combinations are allowed in describing a character class:
+
+<ul>
+
+<li><b><em>x</em>: </b>
+(where <em>x</em> is not one of the <em>magic characters</em>
+<code>^$()%.[]*+-?</code>)
+represents the character <em>x</em> itself.
+</li>
+
+<li><b><code>.</code>: </b> (a dot) represents all characters.</li>
+
+<li><b><code>%a</code>: </b> represents all letters.</li>
+
+<li><b><code>%c</code>: </b> represents all control characters.</li>
+
+<li><b><code>%d</code>: </b> represents all digits.</li>
+
+<li><b><code>%g</code>: </b> represents all printable characters except space.</li>
+
+<li><b><code>%l</code>: </b> represents all lowercase letters.</li>
+
+<li><b><code>%p</code>: </b> represents all punctuation characters.</li>
+
+<li><b><code>%s</code>: </b> represents all space characters.</li>
+
+<li><b><code>%u</code>: </b> represents all uppercase letters.</li>
+
+<li><b><code>%w</code>: </b> represents all alphanumeric characters.</li>
+
+<li><b><code>%x</code>: </b> represents all hexadecimal digits.</li>
+
+<li><b><code>%<em>x</em></code>: </b> (where <em>x</em> is any non-alphanumeric character)
+represents the character <em>x</em>.
+This is the standard way to escape the magic characters.
+Any non-alphanumeric character
+(including all punctuation characters, even the non-magical)
+can be preceded by a '<code>%</code>'
+when used to represent itself in a pattern.
+</li>
+
+<li><b><code>[<em>set</em>]</code>: </b>
+represents the class which is the union of all
+characters in <em>set</em>.
+A range of characters can be specified by
+separating the end characters of the range,
+in ascending order, with a '<code>-</code>'.
+All classes <code>%</code><em>x</em> described above can also be used as
+components in <em>set</em>.
+All other characters in <em>set</em> represent themselves.
+For example, <code>[%w_]</code> (or <code>[_%w]</code>)
+represents all alphanumeric characters plus the underscore,
+<code>[0-7]</code> represents the octal digits,
+and <code>[0-7%l%-]</code> represents the octal digits plus
+the lowercase letters plus the '<code>-</code>' character.
+
+
+<p>
+You can put a closing square bracket in a set
+by positioning it as the first character in the set.
+You can put a hyphen in a set
+by positioning it as the first or the last character in the set.
+(You can also use an escape for both cases.)
+
+
+<p>
+The interaction between ranges and classes is not defined.
+Therefore, patterns like <code>[%a-z]</code> or <code>[a-%%]</code>
+have no meaning.
+</li>
+
+<li><b><code>[^<em>set</em>]</code>: </b>
+represents the complement of <em>set</em>,
+where <em>set</em> is interpreted as above.
+</li>
+
+</ul><p>
+For all classes represented by single letters (<code>%a</code>, <code>%c</code>, etc.),
+the corresponding uppercase letter represents the complement of the class.
+For instance, <code>%S</code> represents all non-space characters.
+
+
+<p>
+The definitions of letter, space, and other character groups
+depend on the current locale.
+In particular, the class <code>[a-z]</code> may not be equivalent to <code>%l</code>.
+
+
+
+
+
+<h4>Pattern Item:</h4><p>
+A <em>pattern item</em> can be
+
+<ul>
+
+<li>
+a single character class,
+which matches any single character in the class;
+</li>
+
+<li>
+a single character class followed by '<code>*</code>',
+which matches zero or more repetitions of characters in the class.
+These repetition items will always match the longest possible sequence;
+</li>
+
+<li>
+a single character class followed by '<code>+</code>',
+which matches one or more repetitions of characters in the class.
+These repetition items will always match the longest possible sequence;
+</li>
+
+<li>
+a single character class followed by '<code>-</code>',
+which also matches zero or more repetitions of characters in the class.
+Unlike '<code>*</code>',
+these repetition items will always match the shortest possible sequence;
+</li>
+
+<li>
+a single character class followed by '<code>?</code>',
+which matches zero or one occurrence of a character in the class.
+It always matches one occurrence if possible;
+</li>
+
+<li>
+<code>%<em>n</em></code>, for <em>n</em> between 1 and 9;
+such item matches a substring equal to the <em>n</em>-th captured string
+(see below);
+</li>
+
+<li>
+<code>%b<em>xy</em></code>, where <em>x</em> and <em>y</em> are two distinct characters;
+such item matches strings that start with&nbsp;<em>x</em>, end with&nbsp;<em>y</em>,
+and where the <em>x</em> and <em>y</em> are <em>balanced</em>.
+This means that, if one reads the string from left to right,
+counting <em>+1</em> for an <em>x</em> and <em>-1</em> for a <em>y</em>,
+the ending <em>y</em> is the first <em>y</em> where the count reaches 0.
+For instance, the item <code>%b()</code> matches expressions with
+balanced parentheses.
+</li>
+
+<li>
+<code>%f[<em>set</em>]</code>, a <em>frontier pattern</em>;
+such item matches an empty string at any position such that
+the next character belongs to <em>set</em>
+and the previous character does not belong to <em>set</em>.
+The set <em>set</em> is interpreted as previously described.
+The beginning and the end of the subject are handled as if
+they were the character '<code>\0</code>'.
+</li>
+
+</ul>
+
+
+
+
+<h4>Pattern:</h4><p>
+A <em>pattern</em> is a sequence of pattern items.
+A caret '<code>^</code>' at the beginning of a pattern anchors the match at the
+beginning of the subject string.
+A '<code>$</code>' at the end of a pattern anchors the match at the
+end of the subject string.
+At other positions,
+'<code>^</code>' and '<code>$</code>' have no special meaning and represent themselves.
+
+
+
+
+
+<h4>Captures:</h4><p>
+A pattern can contain sub-patterns enclosed in parentheses;
+they describe <em>captures</em>.
+When a match succeeds, the substrings of the subject string
+that match captures are stored (<em>captured</em>) for future use.
+Captures are numbered according to their left parentheses.
+For instance, in the pattern <code>"(a*(.)%w(%s*))"</code>,
+the part of the string matching <code>"a*(.)%w(%s*)"</code> is
+stored as the first capture (and therefore has number&nbsp;1);
+the character matching "<code>.</code>" is captured with number&nbsp;2,
+and the part matching "<code>%s*</code>" has number&nbsp;3.
+
+
+<p>
+As a special case, the empty capture <code>()</code> captures
+the current string position (a number).
+For instance, if we apply the pattern <code>"()aa()"</code> on the
+string <code>"flaaap"</code>, there will be two captures: 3&nbsp;and&nbsp;5.
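+
+
+<p>
+The following sketch illustrates these ideas:
+the first call shows how captures are numbered by their left parentheses,
+and the second uses the <code>%b()</code> item to grab a balanced substring:
+
+<pre>
+     print(string.match("key = value", "((%w+)%s*=%s*(%w+))"))
+     --&gt; key = value    key    value
+
+     print(string.match("f(a, (b)) + g(c)", "%b()"))
+     --&gt; (a, (b))
+</pre>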
+
+
+
+
+
+
+
+<h3>6.4.2 &ndash; <a name="6.4.2">Format Strings for Pack and Unpack</a></h3>
+
+<p>
+The first argument to <a href="#pdf-string.pack"><code>string.pack</code></a>,
+<a href="#pdf-string.packsize"><code>string.packsize</code></a>, and <a href="#pdf-string.unpack"><code>string.unpack</code></a>
+is a format string,
+which describes the layout of the structure being created or read.
+
+
+<p>
+A format string is a sequence of conversion options.
+The conversion options are as follows:
+
+<ul>
+<li><b><code>&lt;</code>: </b>sets little endian</li>
+<li><b><code>&gt;</code>: </b>sets big endian</li>
+<li><b><code>=</code>: </b>sets native endian</li>
+<li><b><code>![<em>n</em>]</code>: </b>sets maximum alignment to <code>n</code>
+(default is native alignment)</li>
+<li><b><code>b</code>: </b>a signed byte (<code>char</code>)</li>
+<li><b><code>B</code>: </b>an unsigned byte (<code>char</code>)</li>
+<li><b><code>h</code>: </b>a signed <code>short</code> (native size)</li>
+<li><b><code>H</code>: </b>an unsigned <code>short</code> (native size)</li>
+<li><b><code>l</code>: </b>a signed <code>long</code> (native size)</li>
+<li><b><code>L</code>: </b>an unsigned <code>long</code> (native size)</li>
+<li><b><code>j</code>: </b>a <code>lua_Integer</code></li>
+<li><b><code>J</code>: </b>a <code>lua_Unsigned</code></li>
+<li><b><code>T</code>: </b>a <code>size_t</code> (native size)</li>
+<li><b><code>i[<em>n</em>]</code>: </b>a signed <code>int</code> with <code>n</code> bytes
+(default is native size)</li>
+<li><b><code>I[<em>n</em>]</code>: </b>an unsigned <code>int</code> with <code>n</code> bytes
+(default is native size)</li>
+<li><b><code>f</code>: </b>a <code>float</code> (native size)</li>
+<li><b><code>d</code>: </b>a <code>double</code> (native size)</li>
+<li><b><code>n</code>: </b>a <code>lua_Number</code></li>
+<li><b><code>c<em>n</em></code>: </b>a fixed-sized string with <code>n</code> bytes</li>
+<li><b><code>z</code>: </b>a zero-terminated string</li>
+<li><b><code>s[<em>n</em>]</code>: </b>a string preceded by its length
+coded as an unsigned integer with <code>n</code> bytes
+(default is a <code>size_t</code>)</li>
+<li><b><code>x</code>: </b>one byte of padding</li>
+<li><b><code>X<em>op</em></code>: </b>an empty item that aligns
+according to option <code>op</code>
+(which is otherwise ignored)</li>
+<li><b>'<code> </code>': </b>(empty space) ignored</li>
+</ul><p>
+(A "<code>[<em>n</em>]</code>" means an optional integral numeral.)
+Except for padding, spaces, and configurations
+(options "<code>xX &lt;=&gt;!</code>"),
+each option corresponds to an argument (in <a href="#pdf-string.pack"><code>string.pack</code></a>)
+or a result (in <a href="#pdf-string.unpack"><code>string.unpack</code></a>).
+
+
+<p>
+For options "<code>!<em>n</em></code>", "<code>s<em>n</em></code>", "<code>i<em>n</em></code>", and "<code>I<em>n</em></code>",
+<code>n</code> can be any integer between 1 and 16.
+All integral options check overflows;
+<a href="#pdf-string.pack"><code>string.pack</code></a> checks whether the given value fits in the given size;
+<a href="#pdf-string.unpack"><code>string.unpack</code></a> checks whether the read value fits in a Lua integer.
+
+
+<p>
+Any format string starts as if prefixed by "<code>!1=</code>",
+that is,
+with maximum alignment of 1 (no alignment)
+and native endianness.
+
+
+<p>
+Alignment works as follows:
+For each option,
+the format gets extra padding until the data starts
+at an offset that is a multiple of the minimum between the
+option size and the maximum alignment;
+this minimum must be a power of 2.
+Options "<code>c</code>" and "<code>z</code>" are not aligned;
+option "<code>s</code>" follows the alignment of its starting integer.
+
+
+<p>
+All padding is filled with zeros by <a href="#pdf-string.pack"><code>string.pack</code></a>
+(and ignored by <a href="#pdf-string.unpack"><code>string.unpack</code></a>).
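+
+
+<p>
+For instance, the following sketch packs two little-endian 4-byte integers
+and reads them back
+(the last result of <code>string.unpack</code> is the index of the first unread byte):
+
+<pre>
+     fmt = "&lt;i4i4"
+     s = string.pack(fmt, 1, 2)
+     string.packsize(fmt)     --&gt; 8
+     string.unpack(fmt, s)    --&gt; 1    2    9
+</pre>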
+
+
+
+
+
+
+
+<h2>6.5 &ndash; <a name="6.5">UTF-8 Support</a></h2>
+
+<p>
+This library provides basic support for UTF-8 encoding.
+It provides all its functions inside the table <a name="pdf-utf8"><code>utf8</code></a>.
+This library does not provide any support for Unicode other
+than the handling of the encoding.
+Any operation that needs the meaning of a character,
+such as character classification, is outside its scope.
+
+
+<p>
+Unless stated otherwise,
+all functions that expect a byte position as a parameter
+assume that the given position is either the start of a byte sequence
+or one plus the length of the subject string.
+As in the string library,
+negative indices count from the end of the string.
+
+
+<p>
+<hr><h3><a name="pdf-utf8.char"><code>utf8.char (&middot;&middot;&middot;)</code></a></h3>
+Receives zero or more integers,
+converts each one to its corresponding UTF-8 byte sequence
+and returns a string with the concatenation of all these sequences.
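+
+
+<p>
+For instance:
+
+<pre>
+     utf8.char(76, 117, 97)    --&gt; "Lua"
+</pre>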
+
+
+
+
+<p>
+<hr><h3><a name="pdf-utf8.charpattern"><code>utf8.charpattern</code></a></h3>
+The pattern (a string, not a function) "<code>[\0-\x7F\xC2-\xF4][\x80-\xBF]*</code>"
+(see <a href="#6.4.1">&sect;6.4.1</a>),
+which matches exactly one UTF-8 byte sequence,
+assuming that the subject is a valid UTF-8 string.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-utf8.codes"><code>utf8.codes (s)</code></a></h3>
+
+
+<p>
+Returns values so that the construction
+
+<pre>
+ for p, c in utf8.codes(s) do <em>body</em> end
+</pre><p>
+will iterate over all characters in string <code>s</code>,
+with <code>p</code> being the position (in bytes) and <code>c</code> the code point
+of each character.
+It raises an error if it meets any invalid byte sequence.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-utf8.codepoint"><code>utf8.codepoint (s [, i [, j]])</code></a></h3>
+Returns the codepoints (as integers) from all characters in <code>s</code>
+that start between byte position <code>i</code> and <code>j</code> (both included).
+The default for <code>i</code> is 1 and for <code>j</code> is <code>i</code>.
+It raises an error if it meets any invalid byte sequence.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-utf8.len"><code>utf8.len (s [, i [, j]])</code></a></h3>
+Returns the number of UTF-8 characters in string <code>s</code>
+that start between positions <code>i</code> and <code>j</code> (both inclusive).
+The default for <code>i</code> is 1 and for <code>j</code> is -1.
+If it finds any invalid byte sequence,
+returns a false value plus the position of the first invalid byte.
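+
+
+<p>
+For instance, byte length and character length differ for multi-byte
+sequences (the accented character below is written with byte escapes):
+
+<pre>
+     s = "caf\xC3\xA9"    -- an accented 'e' encoded as the two bytes C3 A9
+     #s                   --&gt; 5
+     utf8.len(s)          --&gt; 4
+</pre>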
+
+
+
+
+<p>
+<hr><h3><a name="pdf-utf8.offset"><code>utf8.offset (s, n [, i])</code></a></h3>
+Returns the position (in bytes) where the encoding of the
+<code>n</code>-th character of <code>s</code>
+(counting from position <code>i</code>) starts.
+A negative <code>n</code> gets characters before position <code>i</code>.
+The default for <code>i</code> is 1 when <code>n</code> is non-negative
+and <code>#s + 1</code> otherwise,
+so that <code>utf8.offset(s, -n)</code> gets the offset of the
+<code>n</code>-th character from the end of the string.
+If the specified character is neither in the subject
+nor right after its end,
+the function returns <b>nil</b>.
+
+
+<p>
+As a special case,
+when <code>n</code> is 0 the function returns the start of the encoding
+of the character that contains the <code>i</code>-th byte of <code>s</code>.
+
+
+<p>
+This function assumes that <code>s</code> is a valid UTF-8 string.
+
+
+
+
+
+
+
+<h2>6.6 &ndash; <a name="6.6">Table Manipulation</a></h2>
+
+<p>
+This library provides generic functions for table manipulation.
+It provides all its functions inside the table <a name="pdf-table"><code>table</code></a>.
+
+
+<p>
+Remember that, whenever an operation needs the length of a table,
+all caveats about the length operator apply (see <a href="#3.4.7">&sect;3.4.7</a>).
+All functions ignore non-numeric keys
+in the tables given as arguments.
+
+
+<p>
+<hr><h3><a name="pdf-table.concat"><code>table.concat (list [, sep [, i [, j]]])</code></a></h3>
+
+
+<p>
+Given a list where all elements are strings or numbers,
+returns the string <code>list[i]..sep..list[i+1] &middot;&middot;&middot; sep..list[j]</code>.
+The default value for <code>sep</code> is the empty string,
+the default for <code>i</code> is 1,
+and the default for <code>j</code> is <code>#list</code>.
+If <code>i</code> is greater than <code>j</code>, returns the empty string.
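+
+
+<p>
+For instance:
+
+<pre>
+     table.concat({"a", "b", "c"}, ", ")      --&gt; "a, b, c"
+     table.concat({1, 2, 3, 4}, "+", 2, 3)    --&gt; "2+3"
+</pre>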
+
+
+
+
+<p>
+<hr><h3><a name="pdf-table.insert"><code>table.insert (list, [pos,] value)</code></a></h3>
+
+
+<p>
+Inserts element <code>value</code> at position <code>pos</code> in <code>list</code>,
+shifting up the elements
+<code>list[pos], list[pos+1], &middot;&middot;&middot;, list[#list]</code>.
+The default value for <code>pos</code> is <code>#list+1</code>,
+so that a call <code>table.insert(t,x)</code> inserts <code>x</code> at the end
+of list <code>t</code>.
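+
+
+<p>
+For instance:
+
+<pre>
+     t = {1, 2, 3}
+     table.insert(t, 4)       -- t is now {1, 2, 3, 4}
+     table.insert(t, 1, 0)    -- t is now {0, 1, 2, 3, 4}
+</pre>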
+
+
+
+
+<p>
+<hr><h3><a name="pdf-table.move"><code>table.move (a1, f, e, t [,a2])</code></a></h3>
+
+
+<p>
+Moves elements from table <code>a1</code> to table <code>a2</code>,
+performing the equivalent to the following
+multiple assignment:
+<code>a2[t],&middot;&middot;&middot; = a1[f],&middot;&middot;&middot;,a1[e]</code>.
+The default for <code>a2</code> is <code>a1</code>.
+The destination range can overlap with the source range.
+The number of elements to be moved must fit in a Lua integer.
+
+
+<p>
+Returns the destination table <code>a2</code>.
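+
+
+<p>
+A short illustrative sketch:
+
+<pre>
+     a = {1, 2, 3, 4, 5}
+     table.move(a, 2, 4, 1)            -- a is now {2, 3, 4, 4, 5}
+     b = table.move(a, 1, 3, 1, {})    -- copies a[1..3] into a new table b
+</pre>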
+
+
+
+
+<p>
+<hr><h3><a name="pdf-table.pack"><code>table.pack (&middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Returns a new table with all parameters stored into keys 1, 2, etc.
+and with a field "<code>n</code>" with the total number of parameters.
+Note that the resulting table may not be a sequence.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-table.remove"><code>table.remove (list [, pos])</code></a></h3>
+
+
+<p>
+Removes from <code>list</code> the element at position <code>pos</code>,
+returning the value of the removed element.
+When <code>pos</code> is an integer between 1 and <code>#list</code>,
+it shifts down the elements
+<code>list[pos+1], list[pos+2], &middot;&middot;&middot;, list[#list]</code>
+and erases element <code>list[#list]</code>.
+The index <code>pos</code> can also be 0 when <code>#list</code> is 0,
+or <code>#list + 1</code>;
+in those cases, the function erases the element <code>list[pos]</code>.
+
+
+<p>
+The default value for <code>pos</code> is <code>#list</code>,
+so that a call <code>table.remove(l)</code> removes the last element
+of list <code>l</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-table.sort"><code>table.sort (list [, comp])</code></a></h3>
+
+
+<p>
+Sorts list elements in a given order, <em>in-place</em>,
+from <code>list[1]</code> to <code>list[#list]</code>.
+If <code>comp</code> is given,
+then it must be a function that receives two list elements
+and returns true when the first element must come
+before the second in the final order
+(so that, after the sort,
+<code>i &lt; j</code> implies <code>not comp(list[j],list[i])</code>).
+If <code>comp</code> is not given,
+then the standard Lua operator <code>&lt;</code> is used instead.
+
+
+<p>
+Note that the <code>comp</code> function must define
+a strict partial order over the elements in the list;
+that is, it must be asymmetric and transitive.
+Otherwise, no valid sort may be possible.
+
+
+<p>
+The sort algorithm is not stable:
+elements considered equal by the given order
+may have their relative positions changed by the sort.
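+
+
+<p>
+For instance:
+
+<pre>
+     t = {3, 1, 2}
+     table.sort(t)                                      -- t is now {1, 2, 3}
+     table.sort(t, function (a, b) return a &gt; b end)    -- t is now {3, 2, 1}
+</pre>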
+
+
+
+
+<p>
+<hr><h3><a name="pdf-table.unpack"><code>table.unpack (list [, i [, j]])</code></a></h3>
+
+
+<p>
+Returns the elements from the given list.
+This function is equivalent to
+
+<pre>
+ return list[i], list[i+1], &middot;&middot;&middot;, list[j]
+</pre><p>
+By default, <code>i</code> is&nbsp;1 and <code>j</code> is <code>#list</code>.
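+
+
+<p>
+For instance, pairing it with <a href="#pdf-table.pack"><code>table.pack</code></a>:
+
+<pre>
+     t = table.pack("a", "b", "c")    -- t = {"a", "b", "c", n=3}
+     table.unpack(t, 1, t.n)          --&gt; a    b    c
+</pre>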
+
+
+
+
+
+
+
+<h2>6.7 &ndash; <a name="6.7">Mathematical Functions</a></h2>
+
+<p>
+This library provides basic mathematical functions.
+It provides all its functions and constants inside the table <a name="pdf-math"><code>math</code></a>.
+Functions with the annotation "<code>integer/float</code>" give
+integer results for integer arguments
+and float results for float (or mixed) arguments.
+Rounding functions
+(<a href="#pdf-math.ceil"><code>math.ceil</code></a>, <a href="#pdf-math.floor"><code>math.floor</code></a>, and <a href="#pdf-math.modf"><code>math.modf</code></a>)
+return an integer when the result fits in the range of an integer,
+or a float otherwise.
+
+
+<p>
+<hr><h3><a name="pdf-math.abs"><code>math.abs (x)</code></a></h3>
+
+
+<p>
+Returns the absolute value of <code>x</code>. (integer/float)
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.acos"><code>math.acos (x)</code></a></h3>
+
+
+<p>
+Returns the arc cosine of <code>x</code> (in radians).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.asin"><code>math.asin (x)</code></a></h3>
+
+
+<p>
+Returns the arc sine of <code>x</code> (in radians).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.atan"><code>math.atan (y [, x])</code></a></h3>
+
+
+<p>
+
+Returns the arc tangent of <code>y/x</code> (in radians),
+but uses the signs of both parameters to find the
+quadrant of the result.
+(It also handles correctly the case of <code>x</code> being zero.)
+
+
+<p>
+The default value for <code>x</code> is 1,
+so that the call <code>math.atan(y)</code>
+returns the arc tangent of <code>y</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.ceil"><code>math.ceil (x)</code></a></h3>
+
+
+<p>
+Returns the smallest integral value larger than or equal to <code>x</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.cos"><code>math.cos (x)</code></a></h3>
+
+
+<p>
+Returns the cosine of <code>x</code> (assumed to be in radians).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.deg"><code>math.deg (x)</code></a></h3>
+
+
+<p>
+Converts the angle <code>x</code> from radians to degrees.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.exp"><code>math.exp (x)</code></a></h3>
+
+
+<p>
+Returns the value <em>e<sup>x</sup></em>
+(where <code>e</code> is the base of natural logarithms).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.floor"><code>math.floor (x)</code></a></h3>
+
+
+<p>
+Returns the largest integral value smaller than or equal to <code>x</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.fmod"><code>math.fmod (x, y)</code></a></h3>
+
+
+<p>
+Returns the remainder of the division of <code>x</code> by <code>y</code>
+that rounds the quotient towards zero. (integer/float)
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.huge"><code>math.huge</code></a></h3>
+
+
+<p>
+The float value <code>HUGE_VAL</code>,
+a value larger than any other numeric value.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.log"><code>math.log (x [, base])</code></a></h3>
+
+
+<p>
+Returns the logarithm of <code>x</code> in the given base.
+The default for <code>base</code> is <em>e</em>
+(so that the function returns the natural logarithm of <code>x</code>).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.max"><code>math.max (x, &middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Returns the argument with the maximum value,
+according to the Lua operator <code>&lt;</code>. (integer/float)
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.maxinteger"><code>math.maxinteger</code></a></h3>
+An integer with the maximum value for an integer.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.min"><code>math.min (x, &middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Returns the argument with the minimum value,
+according to the Lua operator <code>&lt;</code>. (integer/float)
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.mininteger"><code>math.mininteger</code></a></h3>
+An integer with the minimum value for an integer.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.modf"><code>math.modf (x)</code></a></h3>
+
+
+<p>
+Returns the integral part of <code>x</code> and the fractional part of <code>x</code>.
+Its second result is always a float.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.pi"><code>math.pi</code></a></h3>
+
+
+<p>
+The value of <em>&pi;</em>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.rad"><code>math.rad (x)</code></a></h3>
+
+
+<p>
+Converts the angle <code>x</code> from degrees to radians.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.random"><code>math.random ([m [, n]])</code></a></h3>
+
+
+<p>
+When called without arguments,
+returns a pseudo-random float with uniform distribution
+in the range <em>[0,1)</em>.
+When called with two integers <code>m</code> and <code>n</code>,
+<code>math.random</code> returns a pseudo-random integer
+with uniform distribution in the range <em>[m, n]</em>.
+(The value <em>n-m</em> cannot be negative and must fit in a Lua integer.)
+The call <code>math.random(n)</code> is equivalent to <code>math.random(1,n)</code>.
+
+
+<p>
+This function is an interface to the underlying
+pseudo-random generator function provided by C.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.randomseed"><code>math.randomseed (x)</code></a></h3>
+
+
+<p>
+Sets <code>x</code> as the "seed"
+for the pseudo-random generator:
+equal seeds produce equal sequences of numbers.
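+
+
+<p>
+For instance, a common idiom
+(the results of the calls vary between runs):
+
+<pre>
+     math.randomseed(os.time())    -- seed the generator once, e.g. from the clock
+     math.random()                 --&gt; a float in [0,1)
+     math.random(1, 6)             --&gt; an integer between 1 and 6
+</pre>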
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.sin"><code>math.sin (x)</code></a></h3>
+
+
+<p>
+Returns the sine of <code>x</code> (assumed to be in radians).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.sqrt"><code>math.sqrt (x)</code></a></h3>
+
+
+<p>
+Returns the square root of <code>x</code>.
+(You can also use the expression <code>x^0.5</code> to compute this value.)
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.tan"><code>math.tan (x)</code></a></h3>
+
+
+<p>
+Returns the tangent of <code>x</code> (assumed to be in radians).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.tointeger"><code>math.tointeger (x)</code></a></h3>
+
+
+<p>
+If the value <code>x</code> is convertible to an integer,
+returns that integer.
+Otherwise, returns <b>nil</b>.
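+
+
+<p>
+For instance:
+
+<pre>
+     math.tointeger(2.0)    --&gt; 2
+     math.tointeger(2.5)    --&gt; nil
+</pre>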
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.type"><code>math.type (x)</code></a></h3>
+
+
+<p>
+Returns "<code>integer</code>" if <code>x</code> is an integer,
+"<code>float</code>" if it is a float,
+or <b>nil</b> if <code>x</code> is not a number.
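+
+
+<p>
+For instance:
+
+<pre>
+     math.type(3)      --&gt; integer
+     math.type(3.0)    --&gt; float
+     math.type("3")    --&gt; nil
+</pre>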
+
+
+
+
+<p>
+<hr><h3><a name="pdf-math.ult"><code>math.ult (m, n)</code></a></h3>
+
+
+<p>
+Returns a boolean,
+true if and only if integer <code>m</code> is below integer <code>n</code> when
+they are compared as unsigned integers.
+
+
+
+
+
+
+
+<h2>6.8 &ndash; <a name="6.8">Input and Output Facilities</a></h2>
+
+<p>
+The I/O library provides two different styles for file manipulation.
+The first one uses implicit file handles;
+that is, there are operations to set a default input file and a
+default output file,
+and all input/output operations are over these default files.
+The second style uses explicit file handles.
+
+
+<p>
+When using implicit file handles,
+all operations are supplied by table <a name="pdf-io"><code>io</code></a>.
+When using explicit file handles,
+the operation <a href="#pdf-io.open"><code>io.open</code></a> returns a file handle
+and then all operations are supplied as methods of the file handle.
+
+
+<p>
+The table <code>io</code> also provides
+three predefined file handles with their usual meanings from C:
+<a name="pdf-io.stdin"><code>io.stdin</code></a>, <a name="pdf-io.stdout"><code>io.stdout</code></a>, and <a name="pdf-io.stderr"><code>io.stderr</code></a>.
+The I/O library never closes these files.
+
+
+<p>
+Unless otherwise stated,
+all I/O functions return <b>nil</b> on failure
+(plus an error message as a second result and
+a system-dependent error code as a third result)
+and some value different from <b>nil</b> on success.
+On non-POSIX systems,
+the computation of the error message and error code
+in case of errors
+may be not thread safe,
+because they rely on the global C variable <code>errno</code>.
+
+
+<p>
+<hr><h3><a name="pdf-io.close"><code>io.close ([file])</code></a></h3>
+
+
+<p>
+Equivalent to <code>file:close()</code>.
+Without a <code>file</code>, closes the default output file.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.flush"><code>io.flush ()</code></a></h3>
+
+
+<p>
+Equivalent to <code>io.output():flush()</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.input"><code>io.input ([file])</code></a></h3>
+
+
+<p>
+When called with a file name, it opens the named file (in text mode),
+and sets its handle as the default input file.
+When called with a file handle,
+it simply sets this file handle as the default input file.
+When called without parameters,
+it returns the current default input file.
+
+
+<p>
+In case of errors this function raises the error,
+instead of returning an error code.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.lines"><code>io.lines ([filename, &middot;&middot;&middot;])</code></a></h3>
+
+
+<p>
+Opens the given file name in read mode
+and returns an iterator function that
+works like <code>file:lines(&middot;&middot;&middot;)</code> over the opened file.
+When the iterator function detects the end of file,
+it returns no values (to finish the loop) and automatically closes the file.
+
+
+<p>
+The call <code>io.lines()</code> (with no file name) is equivalent
+to <code>io.input():lines("*l")</code>;
+that is, it iterates over the lines of the default input file.
+In this case it does not close the file when the loop ends.
+
+
+<p>
+In case of errors this function raises the error,
+instead of returning an error code.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.open"><code>io.open (filename [, mode])</code></a></h3>
+
+
+<p>
+This function opens a file,
+in the mode specified in the string <code>mode</code>.
+In case of success,
+it returns a new file handle.
+
+
+<p>
+The <code>mode</code> string can be any of the following:
+
+<ul>
+<li><b>"<code>r</code>": </b> read mode (the default);</li>
+<li><b>"<code>w</code>": </b> write mode;</li>
+<li><b>"<code>a</code>": </b> append mode;</li>
+<li><b>"<code>r+</code>": </b> update mode, all previous data is preserved;</li>
+<li><b>"<code>w+</code>": </b> update mode, all previous data is erased;</li>
+<li><b>"<code>a+</code>": </b> append update mode, previous data is preserved,
+ writing is only allowed at the end of file.</li>
+</ul><p>
+The <code>mode</code> string can also have a '<code>b</code>' at the end,
+which is needed in some systems to open the file in binary mode.
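+
+
+<p>
+For instance, a minimal sketch that reads a file line by line
+(the file name <code>data.txt</code> is only an assumption for the example):
+
+<pre>
+     f = assert(io.open("data.txt", "r"))
+     for line in f:lines() do
+       print(line)
+     end
+     f:close()
+</pre>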
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.output"><code>io.output ([file])</code></a></h3>
+
+
+<p>
+Similar to <a href="#pdf-io.input"><code>io.input</code></a>, but operates over the default output file.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.popen"><code>io.popen (prog [, mode])</code></a></h3>
+
+
+<p>
+This function is system dependent and is not available
+on all platforms.
+
+
+<p>
+Starts program <code>prog</code> in a separated process and returns
+a file handle that you can use to read data from this program
+(if <code>mode</code> is <code>"r"</code>, the default)
+or to write data to this program
+(if <code>mode</code> is <code>"w"</code>).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.read"><code>io.read (&middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Equivalent to <code>io.input():read(&middot;&middot;&middot;)</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.tmpfile"><code>io.tmpfile ()</code></a></h3>
+
+
+<p>
+In case of success,
+returns a handle for a temporary file.
+This file is opened in update mode
+and it is automatically removed when the program ends.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.type"><code>io.type (obj)</code></a></h3>
+
+
+<p>
+Checks whether <code>obj</code> is a valid file handle.
+Returns the string <code>"file"</code> if <code>obj</code> is an open file handle,
+<code>"closed file"</code> if <code>obj</code> is a closed file handle,
+or <b>nil</b> if <code>obj</code> is not a file handle.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-io.write"><code>io.write (&middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Equivalent to <code>io.output():write(&middot;&middot;&middot;)</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-file:close"><code>file:close ()</code></a></h3>
+
+
+<p>
+Closes <code>file</code>.
+Note that files are automatically closed when
+their handles are garbage collected,
+but that takes an unpredictable amount of time to happen.
+
+
+<p>
+When closing a file handle created with <a href="#pdf-io.popen"><code>io.popen</code></a>,
+<a href="#pdf-file:close"><code>file:close</code></a> returns the same values
+returned by <a href="#pdf-os.execute"><code>os.execute</code></a>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-file:flush"><code>file:flush ()</code></a></h3>
+
+
+<p>
+Saves any written data to <code>file</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-file:lines"><code>file:lines (&middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Returns an iterator function that,
+each time it is called,
+reads the file according to the given formats.
+When no format is given,
+uses "<code>l</code>" as a default.
+As an example, the construction
+
+<pre>
+ for c in file:lines(1) do <em>body</em> end
+</pre><p>
+will iterate over all characters of the file,
+starting at the current position.
+Unlike <a href="#pdf-io.lines"><code>io.lines</code></a>, this function does not close the file
+when the loop ends.
+
+
+<p>
+In case of errors this function raises the error,
+instead of returning an error code.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-file:read"><code>file:read (&middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Reads the file <code>file</code>,
+according to the given formats, which specify what to read.
+For each format,
+the function returns a string or a number with the characters read,
+or <b>nil</b> if it cannot read data with the specified format.
+(In this latter case,
+the function does not read subsequent formats.)
+When called without formats,
+it uses a default format that reads the next line
+(see below).
+
+
+<p>
+The available formats are
+
+<ul>
+
+<li><b>"<code>n</code>": </b>
+reads a numeral and returns it as a float or an integer,
+following the lexical conventions of Lua.
+(The numeral may have leading spaces and a sign.)
+This format always reads the longest input sequence that
+is a valid prefix for a numeral;
+if that prefix does not form a valid numeral
+(e.g., an empty string, "<code>0x</code>", or "<code>3.4e-</code>"),
+it is discarded and the function returns <b>nil</b>.
+</li>
+
+<li><b>"<code>a</code>": </b>
+reads the whole file, starting at the current position.
+On end of file, it returns the empty string.
+</li>
+
+<li><b>"<code>l</code>": </b>
+reads the next line skipping the end of line,
+returning <b>nil</b> on end of file.
+This is the default format.
+</li>
+
+<li><b>"<code>L</code>": </b>
+reads the next line keeping the end-of-line character (if present),
+returning <b>nil</b> on end of file.
+</li>
+
+<li><b><em>number</em>: </b>
+reads a string with up to this number of bytes,
+returning <b>nil</b> on end of file.
+If <code>number</code> is zero,
+it reads nothing and returns an empty string,
+or <b>nil</b> on end of file.
+</li>
+
+</ul><p>
+The formats "<code>l</code>" and "<code>L</code>" should be used only for text files.
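+
+
+<p>
+For instance, assuming a text file that begins with a numeral,
+one could read that number and then the remainder of the file in a single call:
+
+<pre>
+     f = assert(io.open("data.txt"))    -- "data.txt" is an assumed file name
+     n, rest = f:read("n", "a")
+     f:close()
+</pre>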
+
+
+
+
+<p>
+<hr><h3><a name="pdf-file:seek"><code>file:seek ([whence [, offset]])</code></a></h3>
+
+
+<p>
+Sets and gets the file position,
+measured from the beginning of the file,
+to the position given by <code>offset</code> plus a base
+specified by the string <code>whence</code>, as follows:
+
+<ul>
+<li><b>"<code>set</code>": </b> base is position 0 (beginning of the file);</li>
+<li><b>"<code>cur</code>": </b> base is current position;</li>
+<li><b>"<code>end</code>": </b> base is end of file;</li>
+</ul><p>
+In case of success, <code>seek</code> returns the final file position,
+measured in bytes from the beginning of the file.
+If <code>seek</code> fails, it returns <b>nil</b>,
+plus a string describing the error.
+
+
+<p>
+The default value for <code>whence</code> is <code>"cur"</code>,
+and for <code>offset</code> is 0.
+Therefore, the call <code>file:seek()</code> returns the current
+file position, without changing it;
+the call <code>file:seek("set")</code> sets the position to the
+beginning of the file (and returns 0);
+and the call <code>file:seek("end")</code> sets the position to the
+end of the file, and returns its size.
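+
+
+<p>
+For instance, a common sketch to get the size of an open file
+without reading it (the file name is assumed):
+
+<pre>
+     f = assert(io.open("data.txt", "rb"))
+     size = f:seek("end")    -- size of the file, in bytes
+     f:seek("set")           -- back to the beginning
+</pre>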
+
+
+
+
+<p>
+<hr><h3><a name="pdf-file:setvbuf"><code>file:setvbuf (mode [, size])</code></a></h3>
+
+
+<p>
+Sets the buffering mode for an output file.
+There are three available modes:
+
+<ul>
+
+<li><b>"<code>no</code>": </b>
+no buffering; the result of any output operation appears immediately.
+</li>
+
+<li><b>"<code>full</code>": </b>
+full buffering; output operation is performed only
+when the buffer is full or when
+you explicitly <code>flush</code> the file (see <a href="#pdf-io.flush"><code>io.flush</code></a>).
+</li>
+
+<li><b>"<code>line</code>": </b>
+line buffering; output is buffered until a newline is output
+or there is any input from some special files
+(such as a terminal device).
+</li>
+
+</ul><p>
+For the last two cases, <code>size</code>
+specifies the size of the buffer, in bytes.
+The default is an appropriate size.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-file:write"><code>file:write (&middot;&middot;&middot;)</code></a></h3>
+
+
+<p>
+Writes the value of each of its arguments to <code>file</code>.
+The arguments must be strings or numbers.
+
+
+<p>
+In case of success, this function returns <code>file</code>.
+Otherwise it returns <b>nil</b> plus a string describing the error.
+
+
+
+
+
+
+
+<h2>6.9 &ndash; <a name="6.9">Operating System Facilities</a></h2>
+
+<p>
+This library is implemented through table <a name="pdf-os"><code>os</code></a>.
+
+
+<p>
+<hr><h3><a name="pdf-os.clock"><code>os.clock ()</code></a></h3>
+
+
+<p>
+Returns an approximation of the amount in seconds of CPU time
+used by the program.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.date"><code>os.date ([format [, time]])</code></a></h3>
+
+
+<p>
+Returns a string or a table containing date and time,
+formatted according to the given string <code>format</code>.
+
+
+<p>
+If the <code>time</code> argument is present,
+this is the time to be formatted
+(see the <a href="#pdf-os.time"><code>os.time</code></a> function for a description of this value).
+Otherwise, <code>date</code> formats the current time.
+
+
+<p>
+If <code>format</code> starts with '<code>!</code>',
+then the date is formatted in Coordinated Universal Time.
+After this optional character,
+if <code>format</code> is the string "<code>*t</code>",
+then <code>date</code> returns a table with the following fields:
+<code>year</code>, <code>month</code> (1&ndash;12), <code>day</code> (1&ndash;31),
+<code>hour</code> (0&ndash;23), <code>min</code> (0&ndash;59), <code>sec</code> (0&ndash;61),
+<code>wday</code> (weekday, 1&ndash;7, Sunday is&nbsp;1),
+<code>yday</code> (day of the year, 1&ndash;366),
+and <code>isdst</code> (daylight saving flag, a boolean).
+This last field may be absent
+if the information is not available.
+
+
+<p>
+If <code>format</code> is not "<code>*t</code>",
+then <code>date</code> returns the date as a string,
+formatted according to the same rules as the ISO&nbsp;C function <code>strftime</code>.
+
+
+<p>
+When called without arguments,
+<code>date</code> returns a reasonable date and time representation that depends on
+the host system and on the current locale.
+(More specifically, <code>os.date()</code> is equivalent to <code>os.date("%c")</code>.)
+
+
+<p>
+On non-POSIX systems,
+this function may be not thread safe
+because of its reliance on C&nbsp;function <code>gmtime</code> and C&nbsp;function <code>localtime</code>.
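+
+
+<p>
+For instance (the exact output depends on the current time and locale):
+
+<pre>
+     print(os.date("!%Y-%m-%d %H:%M:%S"))    -- current time in UTC
+     t = os.date("*t")
+     print(t.year, t.month, t.day)           -- fields of the current local date
+</pre>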
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.difftime"><code>os.difftime (t2, t1)</code></a></h3>
+
+
+<p>
+Returns the difference, in seconds,
+from time <code>t1</code> to time <code>t2</code>
+(where the times are values returned by <a href="#pdf-os.time"><code>os.time</code></a>).
+In POSIX, Windows, and some other systems,
+this value is exactly <code>t2</code><em>-</em><code>t1</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.execute"><code>os.execute ([command])</code></a></h3>
+
+
+<p>
+This function is equivalent to the ISO&nbsp;C function <code>system</code>.
+It passes <code>command</code> to be executed by an operating system shell.
+Its first result is <b>true</b>
+if the command terminated successfully,
+or <b>nil</b> otherwise.
+After this first result
+the function returns a string plus a number,
+as follows:
+
+<ul>
+
+<li><b>"<code>exit</code>": </b>
+the command terminated normally;
+the following number is the exit status of the command.
+</li>
+
+<li><b>"<code>signal</code>": </b>
+the command was terminated by a signal;
+the following number is the signal that terminated the command.
+</li>
+
+</ul>
+
+<p>
+When called without a <code>command</code>,
+<code>os.execute</code> returns a boolean that is true if a shell is available.
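+
+
+<p>
+For instance, on a POSIX-like system with a standard shell
+(the results shown are only illustrative):
+
+<pre>
+     os.execute()            --&gt; true   (a shell is available)
+     os.execute("exit 7")    --&gt; nil    "exit"    7
+</pre>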
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.exit"><code>os.exit ([code [, close]])</code></a></h3>
+
+
+<p>
+Calls the ISO&nbsp;C function <code>exit</code> to terminate the host program.
+If <code>code</code> is <b>true</b>,
+the returned status is <code>EXIT_SUCCESS</code>;
+if <code>code</code> is <b>false</b>,
+the returned status is <code>EXIT_FAILURE</code>;
+if <code>code</code> is a number,
+the returned status is this number.
+The default value for <code>code</code> is <b>true</b>.
+
+
+<p>
+If the optional second argument <code>close</code> is true,
+closes the Lua state before exiting.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.getenv"><code>os.getenv (varname)</code></a></h3>
+
+
+<p>
+Returns the value of the process environment variable <code>varname</code>,
+or <b>nil</b> if the variable is not defined.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.remove"><code>os.remove (filename)</code></a></h3>
+
+
+<p>
+Deletes the file (or empty directory, on POSIX systems)
+with the given name.
+If this function fails, it returns <b>nil</b>,
+plus a string describing the error and the error code.
+Otherwise, it returns true.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.rename"><code>os.rename (oldname, newname)</code></a></h3>
+
+
+<p>
+Renames the file or directory named <code>oldname</code> to <code>newname</code>.
+If this function fails, it returns <b>nil</b>,
+plus a string describing the error and the error code.
+Otherwise, it returns true.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.setlocale"><code>os.setlocale (locale [, category])</code></a></h3>
+
+
+<p>
+Sets the current locale of the program.
+<code>locale</code> is a system-dependent string specifying a locale;
+<code>category</code> is an optional string describing which category to change:
+<code>"all"</code>, <code>"collate"</code>, <code>"ctype"</code>,
+<code>"monetary"</code>, <code>"numeric"</code>, or <code>"time"</code>;
+the default category is <code>"all"</code>.
+The function returns the name of the new locale,
+or <b>nil</b> if the request cannot be honored.
+
+
+<p>
+If <code>locale</code> is the empty string,
+the current locale is set to an implementation-defined native locale.
+If <code>locale</code> is the string "<code>C</code>",
+the current locale is set to the standard C locale.
+
+
+<p>
+When called with <b>nil</b> as the first argument,
+this function only returns the name of the current locale
+for the given category.
+
+
+<p>
+This function may be not thread safe
+because of its reliance on C&nbsp;function <code>setlocale</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.time"><code>os.time ([table])</code></a></h3>
+
+
+<p>
+Returns the current time when called without arguments,
+or a time representing the local date and time specified by the given table.
+This table must have fields <code>year</code>, <code>month</code>, and <code>day</code>,
+and may have fields
+<code>hour</code> (default is 12),
+<code>min</code> (default is 0),
+<code>sec</code> (default is 0),
+and <code>isdst</code> (default is <b>nil</b>).
+Other fields are ignored.
+For a description of these fields, see the <a href="#pdf-os.date"><code>os.date</code></a> function.
+
+
+<p>
+The values in these fields do not need to be inside their valid ranges.
+For instance, if <code>sec</code> is -10,
+it means -10 seconds from the time specified by the other fields;
+if <code>hour</code> is 1000,
+it means +1000 hours from the time specified by the other fields.
+
+
+<p>
+The returned value is a number, whose meaning depends on your system.
+In POSIX, Windows, and some other systems,
+this number counts the number
+of seconds since some given start time (the "epoch").
+In other systems, the meaning is not specified,
+and the number returned by <code>time</code> can be used only as an argument to
+<a href="#pdf-os.date"><code>os.date</code></a> and <a href="#pdf-os.difftime"><code>os.difftime</code></a>.
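+
+
+<p>
+For instance, a sketch measuring the seconds elapsed since the start of
+the year 2000 (local time):
+
+<pre>
+     t0 = os.time({year=2000, month=1, day=1, hour=0})
+     now = os.time()
+     elapsed = os.difftime(now, t0)    -- seconds from t0 to now
+</pre>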
+
+
+
+
+<p>
+<hr><h3><a name="pdf-os.tmpname"><code>os.tmpname ()</code></a></h3>
+
+
+<p>
+Returns a string with a file name that can
+be used for a temporary file.
+The file must be explicitly opened before its use
+and explicitly removed when no longer needed.
+
+
+<p>
+On POSIX systems,
+this function also creates a file with that name,
+to avoid security risks.
+(Someone else might create the file with wrong permissions
+in the time between getting the name and creating the file.)
+You still have to open the file to use it
+and to remove it (even if you do not use it).
+
+
+<p>
+When possible,
+you may prefer to use <a href="#pdf-io.tmpfile"><code>io.tmpfile</code></a>,
+which automatically removes the file when the program ends.
+
+
+
+
+
+
+
+<h2>6.10 &ndash; <a name="6.10">The Debug Library</a></h2>
+
+<p>
+This library provides
+the functionality of the debug interface (<a href="#4.9">&sect;4.9</a>) to Lua programs.
+You should exert care when using this library.
+Several of its functions
+violate basic assumptions about Lua code
+(e.g., that variables local to a function
+cannot be accessed from outside;
+that userdata metatables cannot be changed by Lua code;
+that Lua programs do not crash)
+and therefore can compromise otherwise secure code.
+Moreover, some functions in this library may be slow.
+
+
+<p>
+All functions in this library are provided
+inside the <a name="pdf-debug"><code>debug</code></a> table.
+All functions that operate over a thread
+have an optional first argument which is the
+thread to operate over.
+The default is always the current thread.
+
+
+<p>
+<hr><h3><a name="pdf-debug.debug"><code>debug.debug ()</code></a></h3>
+
+
+<p>
+Enters an interactive mode with the user,
+running each string that the user enters.
+Using simple commands and other debug facilities,
+the user can inspect global and local variables,
+change their values, evaluate expressions, and so on.
+A line containing only the word <code>cont</code> finishes this function,
+so that the caller continues its execution.
+
+
+<p>
+Note that commands for <code>debug.debug</code> are not lexically nested
+within any function and so have no direct access to local variables.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.gethook"><code>debug.gethook ([thread])</code></a></h3>
+
+
+<p>
+Returns the current hook settings of the thread, as three values:
+the current hook function, the current hook mask,
+and the current hook count
+(as set by the <a href="#pdf-debug.sethook"><code>debug.sethook</code></a> function).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.getinfo"><code>debug.getinfo ([thread,] f [, what])</code></a></h3>
+
+
+<p>
+Returns a table with information about a function.
+You can give the function directly
+or you can give a number as the value of <code>f</code>,
+which means the function running at level <code>f</code> of the call stack
+of the given thread:
+level&nbsp;0 is the current function (<code>getinfo</code> itself);
+level&nbsp;1 is the function that called <code>getinfo</code>
+(except for tail calls, which do not count on the stack);
+and so on.
+If <code>f</code> is a number larger than the number of active functions,
+then <code>getinfo</code> returns <b>nil</b>.
+
+
+<p>
+The returned table can contain all the fields returned by <a href="#lua_getinfo"><code>lua_getinfo</code></a>,
+with the string <code>what</code> describing which fields to fill in.
+The default for <code>what</code> is to get all information available,
+except the table of valid lines.
+If present,
+the option '<code>f</code>'
+adds a field named <code>func</code> with the function itself.
+If present,
+the option '<code>L</code>'
+adds a field named <code>activelines</code> with the table of
+valid lines.
+
+
+<p>
+For instance, the expression <code>debug.getinfo(1,"n").name</code> returns
+a name for the current function,
+if a reasonable name can be found,
+and the expression <code>debug.getinfo(print)</code>
+returns a table with all available information
+about the <a href="#pdf-print"><code>print</code></a> function.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.getlocal"><code>debug.getlocal ([thread,] f, local)</code></a></h3>
+
+
+<p>
+This function returns the name and the value of the local variable
+with index <code>local</code> of the function at level <code>f</code> of the stack.
+This function accesses not only explicit local variables,
+but also parameters, temporaries, etc.
+
+
+<p>
+The first parameter or local variable has index&nbsp;1, and so on,
+following the order that they are declared in the code,
+counting only the variables that are active
+in the current scope of the function.
+Negative indices refer to vararg parameters;
+-1 is the first vararg parameter.
+The function returns <b>nil</b> if there is no variable with the given index,
+and raises an error when called with a level out of range.
+(You can call <a href="#pdf-debug.getinfo"><code>debug.getinfo</code></a> to check whether the level is valid.)
+
+
+<p>
+Variable names starting with '<code>(</code>' (open parenthesis)
+represent variables with no known names
+(internal variables such as loop control variables,
+and variables from chunks saved without debug information).
+
+
+<p>
+The parameter <code>f</code> may also be a function.
+In that case, <code>getlocal</code> returns only the name of function parameters.
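+
+
+<p>
+For instance, a small sketch in which a function inspects its own
+first parameter:
+
+<pre>
+     function f (x)
+       print(debug.getlocal(1, 1))    --&gt; x    10
+     end
+     f(10)
+</pre>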
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.getmetatable"><code>debug.getmetatable (value)</code></a></h3>
+
+
+<p>
+Returns the metatable of the given <code>value</code>
+or <b>nil</b> if it does not have a metatable.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.getregistry"><code>debug.getregistry ()</code></a></h3>
+
+
+<p>
+Returns the registry table (see <a href="#4.5">&sect;4.5</a>).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.getupvalue"><code>debug.getupvalue (f, up)</code></a></h3>
+
+
+<p>
+This function returns the name and the value of the upvalue
+with index <code>up</code> of the function <code>f</code>.
+The function returns <b>nil</b> if there is no upvalue with the given index.
+
+
+<p>
+Variable names starting with '<code>(</code>' (open parenthesis)
+represent variables with no known names
+(variables from chunks saved without debug information).
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.getuservalue"><code>debug.getuservalue (u)</code></a></h3>
+
+
+<p>
+Returns the Lua value associated to <code>u</code>.
+If <code>u</code> is not a full userdata,
+returns <b>nil</b>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.sethook"><code>debug.sethook ([thread,] hook, mask [, count])</code></a></h3>
+
+
+<p>
+Sets the given function as a hook.
+The string <code>mask</code> and the number <code>count</code> describe
+when the hook will be called.
+The string mask may have any combination of the following characters,
+with the given meaning:
+
+<ul>
+<li><b>'<code>c</code>': </b> the hook is called every time Lua calls a function;</li>
+<li><b>'<code>r</code>': </b> the hook is called every time Lua returns from a function;</li>
+<li><b>'<code>l</code>': </b> the hook is called every time Lua enters a new line of code.</li>
+</ul><p>
+Moreover,
+with a <code>count</code> different from zero,
+the hook is called also after every <code>count</code> instructions.
+
+
+<p>
+When called without arguments,
+<a href="#pdf-debug.sethook"><code>debug.sethook</code></a> turns off the hook.
+
+
+<p>
+When the hook is called, its first parameter is a string
+describing the event that has triggered its call:
+<code>"call"</code> (or <code>"tail call"</code>),
+<code>"return"</code>,
+<code>"line"</code>, and <code>"count"</code>.
+For line events,
+the hook also gets the new line number as its second parameter.
+Inside a hook,
+you can call <code>getinfo</code> with level&nbsp;2 to get more information about
+the running function
+(level&nbsp;0 is the <code>getinfo</code> function,
+and level&nbsp;1 is the hook function).
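+
+
+<p>
+For instance, a minimal line tracer:
+
+<pre>
+     debug.sethook(function (event, line)
+       print("line", line)
+     end, "l")
+     -- code executed from now on prints its line numbers
+     debug.sethook()    -- turn the hook off
+</pre>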
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.setlocal"><code>debug.setlocal ([thread,] level, local, value)</code></a></h3>
+
+
+<p>
+This function assigns the value <code>value</code> to the local variable
+with index <code>local</code> of the function at level <code>level</code> of the stack.
+The function returns <b>nil</b> if there is no local
+variable with the given index,
+and raises an error when called with a <code>level</code> out of range.
+(You can call <code>getinfo</code> to check whether the level is valid.)
+Otherwise, it returns the name of the local variable.
+
+
+<p>
+See <a href="#pdf-debug.getlocal"><code>debug.getlocal</code></a> for more information about
+variable indices and names.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.setmetatable"><code>debug.setmetatable (value, table)</code></a></h3>
+
+
+<p>
+Sets the metatable for the given <code>value</code> to the given <code>table</code>
+(which can be <b>nil</b>).
+Returns <code>value</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.setupvalue"><code>debug.setupvalue (f, up, value)</code></a></h3>
+
+
+<p>
+This function assigns the value <code>value</code> to the upvalue
+with index <code>up</code> of the function <code>f</code>.
+The function returns <b>nil</b> if there is no upvalue
+with the given index.
+Otherwise, it returns the name of the upvalue.
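+
+
+<p>
+(Editorial sketch; the names are made up for illustration.)
+
+<pre>
+     local limit = 10
+     local function get () return limit end
+     print(debug.setupvalue(get, 1, 99))  -- "limit"
+     print(get())                         -- 99
+</pre>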
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.setuservalue"><code>debug.setuservalue (udata, value)</code></a></h3>
+
+
+<p>
+Sets the given <code>value</code> as
+the Lua value associated to the given <code>udata</code>.
+<code>udata</code> must be a full userdata.
+
+
+<p>
+Returns <code>udata</code>.
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.traceback"><code>debug.traceback ([thread,] [message [, level]])</code></a></h3>
+
+
+<p>
+If <code>message</code> is present but is neither a string nor <b>nil</b>,
+this function returns <code>message</code> without further processing.
+Otherwise,
+it returns a string with a traceback of the call stack.
+The optional <code>message</code> string is prepended
+to the traceback.
+An optional <code>level</code> number tells at which level
+to start the traceback
+(default is 1, the function calling <code>traceback</code>).
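+
+
+<p>
+(Editorial sketch:
+a common use is as a message handler for <code>xpcall</code>.)
+
+<pre>
+     local function fail () error("boom") end
+     local ok, msg = xpcall(fail, debug.traceback)
+     print(msg)   -- the error message followed by a stack traceback
+</pre>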
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.upvalueid"><code>debug.upvalueid (f, n)</code></a></h3>
+
+
+<p>
+Returns a unique identifier (as a light userdata)
+for the upvalue numbered <code>n</code>
+from the given function.
+
+
+<p>
+These unique identifiers allow a program to check whether different
+closures share upvalues.
+Lua closures that share an upvalue
+(that is, that access a same external local variable)
+will return identical ids for those upvalue indices.
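+
+
+<p>
+(Editorial sketch:
+two closures over the same local yield equal identifiers.)
+
+<pre>
+     local shared = 0
+     local function inc () shared = shared + 1 end
+     local function get () return shared end
+     print(debug.upvalueid(inc, 1) == debug.upvalueid(get, 1))  -- true
+</pre>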
+
+
+
+
+<p>
+<hr><h3><a name="pdf-debug.upvaluejoin"><code>debug.upvaluejoin (f1, n1, f2, n2)</code></a></h3>
+
+
+<p>
+Make the <code>n1</code>-th upvalue of the Lua closure <code>f1</code>
+refer to the <code>n2</code>-th upvalue of the Lua closure <code>f2</code>.
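+
+
+<p>
+(Editorial sketch; the names are made up for illustration.)
+
+<pre>
+     local a, b = 1, 2
+     local function geta () return a end
+     local function getb () return b end
+     debug.upvaluejoin(geta, 1, getb, 1)  -- 'geta' now reads 'b'
+     print(geta())   -- 2
+</pre>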
+
+
+
+
+
+
+
+<h1>7 &ndash; <a name="7">Lua Standalone</a></h1>
+
+<p>
+Although Lua has been designed as an extension language,
+to be embedded in a host C&nbsp;program,
+it is also frequently used as a standalone language.
+An interpreter for Lua as a standalone language,
+called simply <code>lua</code>,
+is provided with the standard distribution.
+The standalone interpreter includes
+all standard libraries, including the debug library.
+Its usage is:
+
+<pre>
+ lua [options] [script [args]]
+</pre><p>
+The options are:
+
+<ul>
+<li><b><code>-e <em>stat</em></code>: </b> executes string <em>stat</em>;</li>
+<li><b><code>-l <em>mod</em></code>: </b> "requires" <em>mod</em>;</li>
+<li><b><code>-i</code>: </b> enters interactive mode after running <em>script</em>;</li>
+<li><b><code>-v</code>: </b> prints version information;</li>
+<li><b><code>-E</code>: </b> ignores environment variables;</li>
+<li><b><code>--</code>: </b> stops handling options;</li>
+<li><b><code>-</code>: </b> executes <code>stdin</code> as a file and stops handling options.</li>
+</ul><p>
+After handling its options, <code>lua</code> runs the given <em>script</em>.
+When called without arguments,
+<code>lua</code> behaves as <code>lua -v -i</code>
+when the standard input (<code>stdin</code>) is a terminal,
+and as <code>lua -</code> otherwise.
+
+
+<p>
+When called without option <code>-E</code>,
+the interpreter checks for an environment variable <a name="pdf-LUA_INIT_5_3"><code>LUA_INIT_5_3</code></a>
+(or <a name="pdf-LUA_INIT"><code>LUA_INIT</code></a> if the versioned name is not defined)
+before running any argument.
+If the variable content has the format <code>@<em>filename</em></code>,
+then <code>lua</code> executes the file.
+Otherwise, <code>lua</code> executes the string itself.
+
+
+<p>
+When called with option <code>-E</code>,
+besides ignoring <code>LUA_INIT</code>,
+Lua also ignores
+the values of <code>LUA_PATH</code> and <code>LUA_CPATH</code>,
+setting the values of
+<a href="#pdf-package.path"><code>package.path</code></a> and <a href="#pdf-package.cpath"><code>package.cpath</code></a>
+with the default paths defined in <code>luaconf.h</code>.
+
+
+<p>
+All options are handled in order, except <code>-i</code> and <code>-E</code>.
+For instance, an invocation like
+
+<pre>
+ $ lua -e'a=1' -e 'print(a)' script.lua
+</pre><p>
+will first set <code>a</code> to 1, then print the value of <code>a</code>,
+and finally run the file <code>script.lua</code> with no arguments.
+(Here <code>$</code> is the shell prompt. Your prompt may be different.)
+
+
+<p>
+Before running any code,
+<code>lua</code> collects all command-line arguments
+in a global table called <code>arg</code>.
+The script name goes to index 0,
+the first argument after the script name goes to index 1,
+and so on.
+Any arguments before the script name
+(that is, the interpreter name plus its options)
+go to negative indices.
+For instance, in the call
+
+<pre>
+ $ lua -la b.lua t1 t2
+</pre><p>
+the table is like this:
+
+<pre>
+ arg = { [-2] = "lua", [-1] = "-la",
+ [0] = "b.lua",
+ [1] = "t1", [2] = "t2" }
+</pre><p>
+If there is no script in the call,
+the interpreter name goes to index 0,
+followed by the other arguments.
+For instance, the call
+
+<pre>
+ $ lua -e "print(arg[1])"
+</pre><p>
+will print "<code>-e</code>".
+If there is a script,
+the script is called with parameters
+<code>arg[1]</code>, &middot;&middot;&middot;, <code>arg[#arg]</code>.
+(Like all chunks in Lua,
+the script is compiled as a vararg function.)
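+
+
+<p>
+(Editorial sketch:
+a script named <code>b.lua</code>, as in the call shown earlier,
+can read its arguments either from <code>arg</code> or as varargs.)
+
+<pre>
+     -- b.lua
+     local first, second = ...
+     print(first, second, #arg)   -- t1   t2   2
+</pre>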
+
+
+<p>
+In interactive mode,
+Lua repeatedly prompts and waits for a line.
+After reading a line,
+Lua first tries to interpret the line as an expression.
+If it succeeds, it prints its value.
+Otherwise, it interprets the line as a statement.
+If you write an incomplete statement,
+the interpreter waits for its completion
+by issuing a different prompt.
+
+
+<p>
+If the global variable <a name="pdf-_PROMPT"><code>_PROMPT</code></a> contains a string,
+then its value is used as the prompt.
+Similarly, if the global variable <a name="pdf-_PROMPT2"><code>_PROMPT2</code></a> contains a string,
+its value is used as the secondary prompt
+(issued during incomplete statements).
+
+
+<p>
+In case of unprotected errors in the script,
+the interpreter reports the error to the standard error stream.
+If the error object is not a string but
+has a metamethod <code>__tostring</code>,
+the interpreter calls this metamethod to produce the final message.
+Otherwise, the interpreter converts the error object to a string
+and adds a stack traceback to it.
+
+
+<p>
+When finishing normally,
+the interpreter closes its main Lua state
+(see <a href="#lua_close"><code>lua_close</code></a>).
+The script can avoid this step by
+calling <a href="#pdf-os.exit"><code>os.exit</code></a> to terminate.
+
+
+<p>
+To allow the use of Lua as a
+script interpreter in Unix systems,
+the standalone interpreter skips
+the first line of a chunk if it starts with <code>#</code>.
+Therefore, Lua scripts can be made into executable programs
+by using <code>chmod +x</code> and the&nbsp;<code>#!</code> form,
+as in
+
+<pre>
+ #!/usr/local/bin/lua
+</pre><p>
+(Of course,
+the location of the Lua interpreter may be different on your machine.
+If <code>lua</code> is in your <code>PATH</code>,
+then
+
+<pre>
+ #!/usr/bin/env lua
+</pre><p>
+is a more portable solution.)
+
+
+
+<h1>8 &ndash; <a name="8">Incompatibilities with the Previous Version</a></h1>
+
+<p>
+Here we list the incompatibilities that you may find when moving a program
+from Lua&nbsp;5.2 to Lua&nbsp;5.3.
+You can avoid some incompatibilities by compiling Lua with
+appropriate options (see file <code>luaconf.h</code>).
+However,
+all these compatibility options will be removed in the future.
+
+
+<p>
+Lua versions can always change the C API in ways that
+do not imply source-code changes in a program,
+such as the numeric values for constants
+or the implementation of functions as macros.
+Therefore,
+you should not assume that binaries are compatible between
+different Lua versions.
+Always recompile clients of the Lua API when
+using a new version.
+
+
+<p>
+Similarly, Lua versions can always change the internal representation
+of precompiled chunks;
+precompiled chunks are not compatible between different Lua versions.
+
+
+<p>
+The standard paths in the official distribution may
+change between versions.
+
+
+
+<h2>8.1 &ndash; <a name="8.1">Changes in the Language</a></h2>
+<ul>
+
+<li>
+The main difference between Lua&nbsp;5.2 and Lua&nbsp;5.3 is the
+introduction of an integer subtype for numbers.
+Although this change should not affect "normal" computations,
+some computations
+(mainly those that involve some kind of overflow)
+can give different results.
+
+
+<p>
+You can fix these differences by forcing a number to be a float
+(in Lua&nbsp;5.2 all numbers were float),
+in particular writing constants with an ending <code>.0</code>
+or using <code>x = x + 0.0</code> to convert a variable.
+(This recommendation is only for a quick fix
+for an occasional incompatibility;
+it is not a general guideline for good programming.
+For good programming,
+use floats where you need floats
+and integers where you need integers.)
+A short sketch after this list illustrates these differences.
+</li>
+
+<li>
+The conversion of a float to a string now adds a <code>.0</code> suffix
+to the result if it looks like an integer.
+(For instance, the float 2.0 will be printed as <code>2.0</code>,
+not as <code>2</code>.)
+You should always use an explicit format
+when you need a specific format for numbers.
+
+
+<p>
+(Formally this is not an incompatibility,
+because Lua does not specify how numbers are formatted as strings,
+but some programs assumed a specific format.)
+</li>
+
+<li>
+The generational mode for the garbage collector was removed.
+(It was an experimental feature in Lua&nbsp;5.2.)
+</li>
+
+</ul>
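+
+
+<p>
+(Editorial sketch of the integer/float differences mentioned in the list above.)
+
+<pre>
+     print(math.type(1), math.type(1.0))  -- integer   float
+     print(7 // 2, 7 / 2)                 -- 3         3.5
+     local x = 10
+     x = x + 0.0                          -- force 'x' to be a float
+     print(math.type(x))                  -- float
+     print(string.format("%d", 2))        -- an explicit format for output
+</pre>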
+
+
+
+
+<h2>8.2 &ndash; <a name="8.2">Changes in the Libraries</a></h2>
+<ul>
+
+<li>
+The <code>bit32</code> library has been deprecated.
+It is easy to require a compatible external library or,
+better yet, to replace its functions with appropriate bitwise operations.
+(Keep in mind that <code>bit32</code> operates on 32-bit integers,
+while the bitwise operators in Lua&nbsp;5.3 operate on Lua integers,
+which by default have 64&nbsp;bits.)
+</li>
+
+<li>
+The Table library now respects metamethods
+for setting and getting elements.
+</li>
+
+<li>
+The <a href="#pdf-ipairs"><code>ipairs</code></a> iterator now respects metamethods and
+its <code>__ipairs</code> metamethod has been deprecated.
+</li>
+
+<li>
+Option names in <a href="#pdf-io.read"><code>io.read</code></a> do not have a starting '<code>*</code>' anymore.
+For compatibility, Lua will continue to accept (and ignore) this character.
+</li>
+
+<li>
+The following functions were deprecated in the mathematical library:
+<code>atan2</code>, <code>cosh</code>, <code>sinh</code>, <code>tanh</code>, <code>pow</code>,
+<code>frexp</code>, and <code>ldexp</code>.
+You can replace <code>math.pow(x,y)</code> with <code>x^y</code>;
+you can replace <code>math.atan2</code> with <code>math.atan</code>,
+which now accepts one or two parameters;
+you can replace <code>math.ldexp(x,exp)</code> with <code>x * 2.0^exp</code>.
+For the other operations,
+you can either use an external library or
+implement them in Lua
+(a short sketch of such replacements follows this list).
+</li>
+
+<li>
+The searcher for C loaders used by <a href="#pdf-require"><code>require</code></a>
+changed the way it handles versioned names.
+Now, the version should come after the module name
+(as is usual in most other tools).
+For compatibility, that searcher still tries the old format
+if it cannot find an open function according to the new style.
+(Lua&nbsp;5.2 already worked that way,
+but it did not document the change.)
+</li>
+
+<li>
+The call <code>collectgarbage("count")</code> now returns only one result.
+(You can compute that second result from the fractional part
+of the first result.)
+</li>
+
+</ul>
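+
+
+<p>
+(Editorial sketch of the replacements suggested for the deprecated
+mathematical functions; only standard Lua&nbsp;5.3 is assumed.)
+
+<pre>
+     local function pow (x, y) return x ^ y end
+     local function ldexp (x, e) return x * 2.0 ^ e end
+     local function atan2 (y, x) return math.atan(y, x) end
+     print(pow(2, 10), ldexp(1, 3), atan2(1, 1))  -- 1024.0   8.0   ~0.7854
+</pre>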
+
+
+
+
+<h2>8.3 &ndash; <a name="8.3">Changes in the API</a></h2>
+
+
+<ul>
+
+<li>
+Continuation functions now receive as parameters what they needed
+to get through <code>lua_getctx</code>,
+so <code>lua_getctx</code> has been removed.
+Adapt your code accordingly.
+</li>
+
+<li>
+Function <a href="#lua_dump"><code>lua_dump</code></a> has an extra parameter, <code>strip</code>.
+Use 0 as the value of this parameter to get the old behavior.
+</li>
+
+<li>
+Functions to inject/project unsigned integers
+(<code>lua_pushunsigned</code>, <code>lua_tounsigned</code>, <code>lua_tounsignedx</code>,
+<code>luaL_checkunsigned</code>, <code>luaL_optunsigned</code>)
+were deprecated.
+Use their signed equivalents with a type cast.
+</li>
+
+<li>
+Macros to project non-default integer types
+(<code>luaL_checkint</code>, <code>luaL_optint</code>, <code>luaL_checklong</code>, <code>luaL_optlong</code>)
+were deprecated.
+Use their equivalent over <a href="#lua_Integer"><code>lua_Integer</code></a> with a type cast
+(or, when possible, use <a href="#lua_Integer"><code>lua_Integer</code></a> in your code).
+</li>
+
+</ul>
+
+
+
+
+<h1>9 &ndash; <a name="9">The Complete Syntax of Lua</a></h1>
+
+<p>
+Here is the complete syntax of Lua in extended BNF.
+As usual in extended BNF,
+{A} means 0 or more As,
+and [A] means an optional A.
+(For operator precedences, see <a href="#3.4.8">&sect;3.4.8</a>;
+for a description of the terminals
+Name, Numeral,
+and LiteralString, see <a href="#3.1">&sect;3.1</a>.)
+
+
+
+
+<pre>
+
+ chunk ::= block
+
+ block ::= {stat} [retstat]
+
+ stat ::= &lsquo;<b>;</b>&rsquo; |
+ varlist &lsquo;<b>=</b>&rsquo; explist |
+ functioncall |
+ label |
+ <b>break</b> |
+ <b>goto</b> Name |
+ <b>do</b> block <b>end</b> |
+ <b>while</b> exp <b>do</b> block <b>end</b> |
+ <b>repeat</b> block <b>until</b> exp |
+ <b>if</b> exp <b>then</b> block {<b>elseif</b> exp <b>then</b> block} [<b>else</b> block] <b>end</b> |
+ <b>for</b> Name &lsquo;<b>=</b>&rsquo; exp &lsquo;<b>,</b>&rsquo; exp [&lsquo;<b>,</b>&rsquo; exp] <b>do</b> block <b>end</b> |
+ <b>for</b> namelist <b>in</b> explist <b>do</b> block <b>end</b> |
+ <b>function</b> funcname funcbody |
+ <b>local</b> <b>function</b> Name funcbody |
+ <b>local</b> namelist [&lsquo;<b>=</b>&rsquo; explist]
+
+ retstat ::= <b>return</b> [explist] [&lsquo;<b>;</b>&rsquo;]
+
+ label ::= &lsquo;<b>::</b>&rsquo; Name &lsquo;<b>::</b>&rsquo;
+
+ funcname ::= Name {&lsquo;<b>.</b>&rsquo; Name} [&lsquo;<b>:</b>&rsquo; Name]
+
+ varlist ::= var {&lsquo;<b>,</b>&rsquo; var}
+
+ var ::= Name | prefixexp &lsquo;<b>[</b>&rsquo; exp &lsquo;<b>]</b>&rsquo; | prefixexp &lsquo;<b>.</b>&rsquo; Name
+
+ namelist ::= Name {&lsquo;<b>,</b>&rsquo; Name}
+
+ explist ::= exp {&lsquo;<b>,</b>&rsquo; exp}
+
+ exp ::= <b>nil</b> | <b>false</b> | <b>true</b> | Numeral | LiteralString | &lsquo;<b>...</b>&rsquo; | functiondef |
+ prefixexp | tableconstructor | exp binop exp | unop exp
+
+ prefixexp ::= var | functioncall | &lsquo;<b>(</b>&rsquo; exp &lsquo;<b>)</b>&rsquo;
+
+ functioncall ::= prefixexp args | prefixexp &lsquo;<b>:</b>&rsquo; Name args
+
+ args ::= &lsquo;<b>(</b>&rsquo; [explist] &lsquo;<b>)</b>&rsquo; | tableconstructor | LiteralString
+
+ functiondef ::= <b>function</b> funcbody
+
+ funcbody ::= &lsquo;<b>(</b>&rsquo; [parlist] &lsquo;<b>)</b>&rsquo; block <b>end</b>
+
+ parlist ::= namelist [&lsquo;<b>,</b>&rsquo; &lsquo;<b>...</b>&rsquo;] | &lsquo;<b>...</b>&rsquo;
+
+ tableconstructor ::= &lsquo;<b>{</b>&rsquo; [fieldlist] &lsquo;<b>}</b>&rsquo;
+
+ fieldlist ::= field {fieldsep field} [fieldsep]
+
+ field ::= &lsquo;<b>[</b>&rsquo; exp &lsquo;<b>]</b>&rsquo; &lsquo;<b>=</b>&rsquo; exp | Name &lsquo;<b>=</b>&rsquo; exp | exp
+
+ fieldsep ::= &lsquo;<b>,</b>&rsquo; | &lsquo;<b>;</b>&rsquo;
+
+ binop ::= &lsquo;<b>+</b>&rsquo; | &lsquo;<b>-</b>&rsquo; | &lsquo;<b>*</b>&rsquo; | &lsquo;<b>/</b>&rsquo; | &lsquo;<b>//</b>&rsquo; | &lsquo;<b>^</b>&rsquo; | &lsquo;<b>%</b>&rsquo; |
+ &lsquo;<b>&amp;</b>&rsquo; | &lsquo;<b>~</b>&rsquo; | &lsquo;<b>|</b>&rsquo; | &lsquo;<b>&gt;&gt;</b>&rsquo; | &lsquo;<b>&lt;&lt;</b>&rsquo; | &lsquo;<b>..</b>&rsquo; |
+ &lsquo;<b>&lt;</b>&rsquo; | &lsquo;<b>&lt;=</b>&rsquo; | &lsquo;<b>&gt;</b>&rsquo; | &lsquo;<b>&gt;=</b>&rsquo; | &lsquo;<b>==</b>&rsquo; | &lsquo;<b>~=</b>&rsquo; |
+ <b>and</b> | <b>or</b>
+
+ unop ::= &lsquo;<b>-</b>&rsquo; | <b>not</b> | &lsquo;<b>#</b>&rsquo; | &lsquo;<b>~</b>&rsquo;
+
+</pre>
+
+<p>
+
+
+
+
+
+
+
+<P CLASS="footer">
+Last update:
+Mon Jan 9 13:30:53 BRST 2017
+</P>
+<!--
+Last change: revised for Lua 5.3.4
+-->
+
+</body></html>
+
diff --git a/lua/src/lapi.c b/lua/src/lapi.c
new file mode 100644
index 000000000..c9455a5d8
--- /dev/null
+++ b/lua/src/lapi.c
@@ -0,0 +1,1298 @@
+/*
+** $Id: lapi.c,v 2.259 2016/02/29 14:27:14 roberto Exp $
+** Lua API
+** See Copyright Notice in lua.h
+*/
+
+#define lapi_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <stdarg.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lapi.h"
+#include "ldebug.h"
+#include "ldo.h"
+#include "lfunc.h"
+#include "lgc.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "ltm.h"
+#include "lundump.h"
+#include "lvm.h"
+
+
+
+const char lua_ident[] =
+ "$LuaVersion: " LUA_COPYRIGHT " $"
+ "$LuaAuthors: " LUA_AUTHORS " $";
+
+
+/* value at a non-valid index */
+#define NONVALIDVALUE cast(TValue *, luaO_nilobject)
+
+/* corresponding test */
+#define isvalid(o) ((o) != luaO_nilobject)
+
+/* test for pseudo index */
+#define ispseudo(i) ((i) <= LUA_REGISTRYINDEX)
+
+/* test for upvalue */
+#define isupvalue(i) ((i) < LUA_REGISTRYINDEX)
+
+/* test for valid but not pseudo index */
+#define isstackindex(i, o) (isvalid(o) && !ispseudo(i))
+
+#define api_checkvalidindex(l,o) api_check(l, isvalid(o), "invalid index")
+
+#define api_checkstackindex(l, i, o) \
+ api_check(l, isstackindex(i, o), "index not in the stack")
+
+
+static TValue *index2addr (lua_State *L, int idx) {
+ CallInfo *ci = L->ci;
+ if (idx > 0) {
+ TValue *o = ci->func + idx;
+ api_check(L, idx <= ci->top - (ci->func + 1), "unacceptable index");
+ if (o >= L->top) return NONVALIDVALUE;
+ else return o;
+ }
+ else if (!ispseudo(idx)) { /* negative index */
+ api_check(L, idx != 0 && -idx <= L->top - (ci->func + 1), "invalid index");
+ return L->top + idx;
+ }
+ else if (idx == LUA_REGISTRYINDEX)
+ return &G(L)->l_registry;
+ else { /* upvalues */
+ idx = LUA_REGISTRYINDEX - idx;
+ api_check(L, idx <= MAXUPVAL + 1, "upvalue index too large");
+ if (ttislcf(ci->func)) /* light C function? */
+ return NONVALIDVALUE; /* it has no upvalues */
+ else {
+ CClosure *func = clCvalue(ci->func);
+ return (idx <= func->nupvalues) ? &func->upvalue[idx-1] : NONVALIDVALUE;
+ }
+ }
+}
+
+
+/*
+** to be called by 'lua_checkstack' in protected mode, to grow stack
+** capturing memory errors
+*/
+static void growstack (lua_State *L, void *ud) {
+ int size = *(int *)ud;
+ luaD_growstack(L, size);
+}
+
+
+LUA_API int lua_checkstack (lua_State *L, int n) {
+ int res;
+ CallInfo *ci = L->ci;
+ lua_lock(L);
+ api_check(L, n >= 0, "negative 'n'");
+ if (L->stack_last - L->top > n) /* stack large enough? */
+ res = 1; /* yes; check is OK */
+ else { /* no; need to grow stack */
+ int inuse = cast_int(L->top - L->stack) + EXTRA_STACK;
+ if (inuse > LUAI_MAXSTACK - n) /* can grow without overflow? */
+ res = 0; /* no */
+ else /* try to grow stack */
+ res = (luaD_rawrunprotected(L, &growstack, &n) == LUA_OK);
+ }
+ if (res && ci->top < L->top + n)
+ ci->top = L->top + n; /* adjust frame top */
+ lua_unlock(L);
+ return res;
+}
+
+
+LUA_API void lua_xmove (lua_State *from, lua_State *to, int n) {
+ int i;
+ if (from == to) return;
+ lua_lock(to);
+ api_checknelems(from, n);
+ api_check(from, G(from) == G(to), "moving among independent states");
+ api_check(from, to->ci->top - to->top >= n, "stack overflow");
+ from->top -= n;
+ for (i = 0; i < n; i++) {
+ setobj2s(to, to->top, from->top + i);
+ to->top++; /* stack already checked by previous 'api_check' */
+ }
+ lua_unlock(to);
+}
+
+
+LUA_API lua_CFunction lua_atpanic (lua_State *L, lua_CFunction panicf) {
+ lua_CFunction old;
+ lua_lock(L);
+ old = G(L)->panic;
+ G(L)->panic = panicf;
+ lua_unlock(L);
+ return old;
+}
+
+
+LUA_API const lua_Number *lua_version (lua_State *L) {
+ static const lua_Number version = LUA_VERSION_NUM;
+ if (L == NULL) return &version;
+ else return G(L)->version;
+}
+
+
+
+/*
+** basic stack manipulation
+*/
+
+
+/*
+** convert an acceptable stack index into an absolute index
+*/
+LUA_API int lua_absindex (lua_State *L, int idx) {
+ return (idx > 0 || ispseudo(idx))
+ ? idx
+ : cast_int(L->top - L->ci->func) + idx;
+}
+
+
+LUA_API int lua_gettop (lua_State *L) {
+ return cast_int(L->top - (L->ci->func + 1));
+}
+
+
+LUA_API void lua_settop (lua_State *L, int idx) {
+ StkId func = L->ci->func;
+ lua_lock(L);
+ if (idx >= 0) {
+ api_check(L, idx <= L->stack_last - (func + 1), "new top too large");
+ while (L->top < (func + 1) + idx)
+ setnilvalue(L->top++);
+ L->top = (func + 1) + idx;
+ }
+ else {
+ api_check(L, -(idx+1) <= (L->top - (func + 1)), "invalid new top");
+ L->top += idx+1; /* 'subtract' index (index is negative) */
+ }
+ lua_unlock(L);
+}
+
+
+/*
+** Reverse the stack segment from 'from' to 'to'
+** (auxiliary to 'lua_rotate')
+*/
+static void reverse (lua_State *L, StkId from, StkId to) {
+ for (; from < to; from++, to--) {
+ TValue temp;
+ setobj(L, &temp, from);
+ setobjs2s(L, from, to);
+ setobj2s(L, to, &temp);
+ }
+}
+
+
+/*
+** Let x = AB, where A is a prefix of length 'n'. Then,
+** rotate x n == BA. But BA == (A^r . B^r)^r.
+*/
+LUA_API void lua_rotate (lua_State *L, int idx, int n) {
+ StkId p, t, m;
+ lua_lock(L);
+ t = L->top - 1; /* end of stack segment being rotated */
+ p = index2addr(L, idx); /* start of segment */
+ api_checkstackindex(L, idx, p);
+ api_check(L, (n >= 0 ? n : -n) <= (t - p + 1), "invalid 'n'");
+ m = (n >= 0 ? t - n : p - n - 1); /* end of prefix */
+ reverse(L, p, m); /* reverse the prefix with length 'n' */
+ reverse(L, m + 1, t); /* reverse the suffix */
+ reverse(L, p, t); /* reverse the entire segment */
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_copy (lua_State *L, int fromidx, int toidx) {
+ TValue *fr, *to;
+ lua_lock(L);
+ fr = index2addr(L, fromidx);
+ to = index2addr(L, toidx);
+ api_checkvalidindex(L, to);
+ setobj(L, to, fr);
+ if (isupvalue(toidx)) /* function upvalue? */
+ luaC_barrier(L, clCvalue(L->ci->func), fr);
+ /* LUA_REGISTRYINDEX does not need gc barrier
+ (collector revisits it before finishing collection) */
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_pushvalue (lua_State *L, int idx) {
+ lua_lock(L);
+ setobj2s(L, L->top, index2addr(L, idx));
+ api_incr_top(L);
+ lua_unlock(L);
+}
+
+
+
+/*
+** access functions (stack -> C)
+*/
+
+
+LUA_API int lua_type (lua_State *L, int idx) {
+ StkId o = index2addr(L, idx);
+ return (isvalid(o) ? ttnov(o) : LUA_TNONE);
+}
+
+
+LUA_API const char *lua_typename (lua_State *L, int t) {
+ UNUSED(L);
+ api_check(L, LUA_TNONE <= t && t < LUA_NUMTAGS, "invalid tag");
+ return ttypename(t);
+}
+
+
+LUA_API int lua_iscfunction (lua_State *L, int idx) {
+ StkId o = index2addr(L, idx);
+ return (ttislcf(o) || (ttisCclosure(o)));
+}
+
+
+LUA_API int lua_isinteger (lua_State *L, int idx) {
+ StkId o = index2addr(L, idx);
+ return ttisinteger(o);
+}
+
+
+LUA_API int lua_isnumber (lua_State *L, int idx) {
+ lua_Number n;
+ const TValue *o = index2addr(L, idx);
+ return tonumber(o, &n);
+}
+
+
+LUA_API int lua_isstring (lua_State *L, int idx) {
+ const TValue *o = index2addr(L, idx);
+ return (ttisstring(o) || cvt2str(o));
+}
+
+
+LUA_API int lua_isuserdata (lua_State *L, int idx) {
+ const TValue *o = index2addr(L, idx);
+ return (ttisfulluserdata(o) || ttislightuserdata(o));
+}
+
+
+LUA_API int lua_rawequal (lua_State *L, int index1, int index2) {
+ StkId o1 = index2addr(L, index1);
+ StkId o2 = index2addr(L, index2);
+ return (isvalid(o1) && isvalid(o2)) ? luaV_rawequalobj(o1, o2) : 0;
+}
+
+
+LUA_API void lua_arith (lua_State *L, int op) {
+ lua_lock(L);
+ if (op != LUA_OPUNM && op != LUA_OPBNOT)
+ api_checknelems(L, 2); /* all other operations expect two operands */
+ else { /* for unary operations, add fake 2nd operand */
+ api_checknelems(L, 1);
+ setobjs2s(L, L->top, L->top - 1);
+ api_incr_top(L);
+ }
+ /* first operand at top - 2, second at top - 1; result go to top - 2 */
+ luaO_arith(L, op, L->top - 2, L->top - 1, L->top - 2);
+ L->top--; /* remove second operand */
+ lua_unlock(L);
+}
+
+
+LUA_API int lua_compare (lua_State *L, int index1, int index2, int op) {
+ StkId o1, o2;
+ int i = 0;
+ lua_lock(L); /* may call tag method */
+ o1 = index2addr(L, index1);
+ o2 = index2addr(L, index2);
+ if (isvalid(o1) && isvalid(o2)) {
+ switch (op) {
+ case LUA_OPEQ: i = luaV_equalobj(L, o1, o2); break;
+ case LUA_OPLT: i = luaV_lessthan(L, o1, o2); break;
+ case LUA_OPLE: i = luaV_lessequal(L, o1, o2); break;
+ default: api_check(L, 0, "invalid option");
+ }
+ }
+ lua_unlock(L);
+ return i;
+}
+
+
+LUA_API size_t lua_stringtonumber (lua_State *L, const char *s) {
+ size_t sz = luaO_str2num(s, L->top);
+ if (sz != 0)
+ api_incr_top(L);
+ return sz;
+}
+
+
+LUA_API lua_Number lua_tonumberx (lua_State *L, int idx, int *pisnum) {
+ lua_Number n;
+ const TValue *o = index2addr(L, idx);
+ int isnum = tonumber(o, &n);
+ if (!isnum)
+ n = 0; /* call to 'tonumber' may change 'n' even if it fails */
+ if (pisnum) *pisnum = isnum;
+ return n;
+}
+
+
+LUA_API lua_Integer lua_tointegerx (lua_State *L, int idx, int *pisnum) {
+ lua_Integer res;
+ const TValue *o = index2addr(L, idx);
+ int isnum = tointeger(o, &res);
+ if (!isnum)
+ res = 0; /* call to 'tointeger' may change 'n' even if it fails */
+ if (pisnum) *pisnum = isnum;
+ return res;
+}
+
+
+LUA_API int lua_toboolean (lua_State *L, int idx) {
+ const TValue *o = index2addr(L, idx);
+ return !l_isfalse(o);
+}
+
+
+LUA_API const char *lua_tolstring (lua_State *L, int idx, size_t *len) {
+ StkId o = index2addr(L, idx);
+ if (!ttisstring(o)) {
+ if (!cvt2str(o)) { /* not convertible? */
+ if (len != NULL) *len = 0;
+ return NULL;
+ }
+ lua_lock(L); /* 'luaO_tostring' may create a new string */
+ luaO_tostring(L, o);
+ luaC_checkGC(L);
+ o = index2addr(L, idx); /* previous call may reallocate the stack */
+ lua_unlock(L);
+ }
+ if (len != NULL)
+ *len = vslen(o);
+ return svalue(o);
+}
+
+
+LUA_API size_t lua_rawlen (lua_State *L, int idx) {
+ StkId o = index2addr(L, idx);
+ switch (ttype(o)) {
+ case LUA_TSHRSTR: return tsvalue(o)->shrlen;
+ case LUA_TLNGSTR: return tsvalue(o)->u.lnglen;
+ case LUA_TUSERDATA: return uvalue(o)->len;
+ case LUA_TTABLE: return luaH_getn(hvalue(o));
+ default: return 0;
+ }
+}
+
+
+LUA_API lua_CFunction lua_tocfunction (lua_State *L, int idx) {
+ StkId o = index2addr(L, idx);
+ if (ttislcf(o)) return fvalue(o);
+ else if (ttisCclosure(o))
+ return clCvalue(o)->f;
+ else return NULL; /* not a C function */
+}
+
+
+LUA_API void *lua_touserdata (lua_State *L, int idx) {
+ StkId o = index2addr(L, idx);
+ switch (ttnov(o)) {
+ case LUA_TUSERDATA: return getudatamem(uvalue(o));
+ case LUA_TLIGHTUSERDATA: return pvalue(o);
+ default: return NULL;
+ }
+}
+
+
+LUA_API lua_State *lua_tothread (lua_State *L, int idx) {
+ StkId o = index2addr(L, idx);
+ return (!ttisthread(o)) ? NULL : thvalue(o);
+}
+
+
+LUA_API const void *lua_topointer (lua_State *L, int idx) {
+ StkId o = index2addr(L, idx);
+ switch (ttype(o)) {
+ case LUA_TTABLE: return hvalue(o);
+ case LUA_TLCL: return clLvalue(o);
+ case LUA_TCCL: return clCvalue(o);
+ case LUA_TLCF: return cast(void *, cast(size_t, fvalue(o)));
+ case LUA_TTHREAD: return thvalue(o);
+ case LUA_TUSERDATA: return getudatamem(uvalue(o));
+ case LUA_TLIGHTUSERDATA: return pvalue(o);
+ default: return NULL;
+ }
+}
+
+
+
+/*
+** push functions (C -> stack)
+*/
+
+
+LUA_API void lua_pushnil (lua_State *L) {
+ lua_lock(L);
+ setnilvalue(L->top);
+ api_incr_top(L);
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_pushnumber (lua_State *L, lua_Number n) {
+ lua_lock(L);
+ setfltvalue(L->top, n);
+ api_incr_top(L);
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_pushinteger (lua_State *L, lua_Integer n) {
+ lua_lock(L);
+ setivalue(L->top, n);
+ api_incr_top(L);
+ lua_unlock(L);
+}
+
+
+/*
+** Pushes on the stack a string with given length. Avoid using 's' when
+** 'len' == 0 (as 's' can be NULL in that case), due to later use of
+** 'memcmp' and 'memcpy'.
+*/
+LUA_API const char *lua_pushlstring (lua_State *L, const char *s, size_t len) {
+ TString *ts;
+ lua_lock(L);
+ ts = (len == 0) ? luaS_new(L, "") : luaS_newlstr(L, s, len);
+ setsvalue2s(L, L->top, ts);
+ api_incr_top(L);
+ luaC_checkGC(L);
+ lua_unlock(L);
+ return getstr(ts);
+}
+
+
+LUA_API const char *lua_pushstring (lua_State *L, const char *s) {
+ lua_lock(L);
+ if (s == NULL)
+ setnilvalue(L->top);
+ else {
+ TString *ts;
+ ts = luaS_new(L, s);
+ setsvalue2s(L, L->top, ts);
+ s = getstr(ts); /* internal copy's address */
+ }
+ api_incr_top(L);
+ luaC_checkGC(L);
+ lua_unlock(L);
+ return s;
+}
+
+
+LUA_API const char *lua_pushvfstring (lua_State *L, const char *fmt,
+ va_list argp) {
+ const char *ret;
+ lua_lock(L);
+ ret = luaO_pushvfstring(L, fmt, argp);
+ luaC_checkGC(L);
+ lua_unlock(L);
+ return ret;
+}
+
+
+LUA_API const char *lua_pushfstring (lua_State *L, const char *fmt, ...) {
+ const char *ret;
+ va_list argp;
+ lua_lock(L);
+ va_start(argp, fmt);
+ ret = luaO_pushvfstring(L, fmt, argp);
+ va_end(argp);
+ luaC_checkGC(L);
+ lua_unlock(L);
+ return ret;
+}
+
+
+LUA_API void lua_pushcclosure (lua_State *L, lua_CFunction fn, int n) {
+ lua_lock(L);
+ if (n == 0) {
+ setfvalue(L->top, fn);
+ }
+ else {
+ CClosure *cl;
+ api_checknelems(L, n);
+ api_check(L, n <= MAXUPVAL, "upvalue index too large");
+ cl = luaF_newCclosure(L, n);
+ cl->f = fn;
+ L->top -= n;
+ while (n--) {
+ setobj2n(L, &cl->upvalue[n], L->top + n);
+ /* does not need barrier because closure is white */
+ }
+ setclCvalue(L, L->top, cl);
+ }
+ api_incr_top(L);
+ luaC_checkGC(L);
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_pushboolean (lua_State *L, int b) {
+ lua_lock(L);
+ setbvalue(L->top, (b != 0)); /* ensure that true is 1 */
+ api_incr_top(L);
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_pushlightuserdata (lua_State *L, void *p) {
+ lua_lock(L);
+ setpvalue(L->top, p);
+ api_incr_top(L);
+ lua_unlock(L);
+}
+
+
+LUA_API int lua_pushthread (lua_State *L) {
+ lua_lock(L);
+ setthvalue(L, L->top, L);
+ api_incr_top(L);
+ lua_unlock(L);
+ return (G(L)->mainthread == L);
+}
+
+
+
+/*
+** get functions (Lua -> stack)
+*/
+
+
+static int auxgetstr (lua_State *L, const TValue *t, const char *k) {
+ const TValue *slot;
+ TString *str = luaS_new(L, k);
+ if (luaV_fastget(L, t, str, slot, luaH_getstr)) {
+ setobj2s(L, L->top, slot);
+ api_incr_top(L);
+ }
+ else {
+ setsvalue2s(L, L->top, str);
+ api_incr_top(L);
+ luaV_finishget(L, t, L->top - 1, L->top - 1, slot);
+ }
+ lua_unlock(L);
+ return ttnov(L->top - 1);
+}
+
+
+LUA_API int lua_getglobal (lua_State *L, const char *name) {
+ Table *reg = hvalue(&G(L)->l_registry);
+ lua_lock(L);
+ return auxgetstr(L, luaH_getint(reg, LUA_RIDX_GLOBALS), name);
+}
+
+
+LUA_API int lua_gettable (lua_State *L, int idx) {
+ StkId t;
+ lua_lock(L);
+ t = index2addr(L, idx);
+ luaV_gettable(L, t, L->top - 1, L->top - 1);
+ lua_unlock(L);
+ return ttnov(L->top - 1);
+}
+
+
+LUA_API int lua_getfield (lua_State *L, int idx, const char *k) {
+ lua_lock(L);
+ return auxgetstr(L, index2addr(L, idx), k);
+}
+
+
+LUA_API int lua_geti (lua_State *L, int idx, lua_Integer n) {
+ StkId t;
+ const TValue *slot;
+ lua_lock(L);
+ t = index2addr(L, idx);
+ if (luaV_fastget(L, t, n, slot, luaH_getint)) {
+ setobj2s(L, L->top, slot);
+ api_incr_top(L);
+ }
+ else {
+ setivalue(L->top, n);
+ api_incr_top(L);
+ luaV_finishget(L, t, L->top - 1, L->top - 1, slot);
+ }
+ lua_unlock(L);
+ return ttnov(L->top - 1);
+}
+
+
+LUA_API int lua_rawget (lua_State *L, int idx) {
+ StkId t;
+ lua_lock(L);
+ t = index2addr(L, idx);
+ api_check(L, ttistable(t), "table expected");
+ setobj2s(L, L->top - 1, luaH_get(hvalue(t), L->top - 1));
+ lua_unlock(L);
+ return ttnov(L->top - 1);
+}
+
+
+LUA_API int lua_rawgeti (lua_State *L, int idx, lua_Integer n) {
+ StkId t;
+ lua_lock(L);
+ t = index2addr(L, idx);
+ api_check(L, ttistable(t), "table expected");
+ setobj2s(L, L->top, luaH_getint(hvalue(t), n));
+ api_incr_top(L);
+ lua_unlock(L);
+ return ttnov(L->top - 1);
+}
+
+
+LUA_API int lua_rawgetp (lua_State *L, int idx, const void *p) {
+ StkId t;
+ TValue k;
+ lua_lock(L);
+ t = index2addr(L, idx);
+ api_check(L, ttistable(t), "table expected");
+ setpvalue(&k, cast(void *, p));
+ setobj2s(L, L->top, luaH_get(hvalue(t), &k));
+ api_incr_top(L);
+ lua_unlock(L);
+ return ttnov(L->top - 1);
+}
+
+
+LUA_API void lua_createtable (lua_State *L, int narray, int nrec) {
+ Table *t;
+ lua_lock(L);
+ t = luaH_new(L);
+ sethvalue(L, L->top, t);
+ api_incr_top(L);
+ if (narray > 0 || nrec > 0)
+ luaH_resize(L, t, narray, nrec);
+ luaC_checkGC(L);
+ lua_unlock(L);
+}
+
+
+LUA_API int lua_getmetatable (lua_State *L, int objindex) {
+ const TValue *obj;
+ Table *mt;
+ int res = 0;
+ lua_lock(L);
+ obj = index2addr(L, objindex);
+ switch (ttnov(obj)) {
+ case LUA_TTABLE:
+ mt = hvalue(obj)->metatable;
+ break;
+ case LUA_TUSERDATA:
+ mt = uvalue(obj)->metatable;
+ break;
+ default:
+ mt = G(L)->mt[ttnov(obj)];
+ break;
+ }
+ if (mt != NULL) {
+ sethvalue(L, L->top, mt);
+ api_incr_top(L);
+ res = 1;
+ }
+ lua_unlock(L);
+ return res;
+}
+
+
+LUA_API int lua_getuservalue (lua_State *L, int idx) {
+ StkId o;
+ lua_lock(L);
+ o = index2addr(L, idx);
+ api_check(L, ttisfulluserdata(o), "full userdata expected");
+ getuservalue(L, uvalue(o), L->top);
+ api_incr_top(L);
+ lua_unlock(L);
+ return ttnov(L->top - 1);
+}
+
+
+/*
+** set functions (stack -> Lua)
+*/
+
+/*
+** t[k] = value at the top of the stack (where 'k' is a string)
+*/
+static void auxsetstr (lua_State *L, const TValue *t, const char *k) {
+ const TValue *slot;
+ TString *str = luaS_new(L, k);
+ api_checknelems(L, 1);
+ if (luaV_fastset(L, t, str, slot, luaH_getstr, L->top - 1))
+ L->top--; /* pop value */
+ else {
+ setsvalue2s(L, L->top, str); /* push 'str' (to make it a TValue) */
+ api_incr_top(L);
+ luaV_finishset(L, t, L->top - 1, L->top - 2, slot);
+ L->top -= 2; /* pop value and key */
+ }
+ lua_unlock(L); /* lock done by caller */
+}
+
+
+LUA_API void lua_setglobal (lua_State *L, const char *name) {
+ Table *reg = hvalue(&G(L)->l_registry);
+ lua_lock(L); /* unlock done in 'auxsetstr' */
+ auxsetstr(L, luaH_getint(reg, LUA_RIDX_GLOBALS), name);
+}
+
+
+LUA_API void lua_settable (lua_State *L, int idx) {
+ StkId t;
+ lua_lock(L);
+ api_checknelems(L, 2);
+ t = index2addr(L, idx);
+ luaV_settable(L, t, L->top - 2, L->top - 1);
+ L->top -= 2; /* pop index and value */
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_setfield (lua_State *L, int idx, const char *k) {
+ lua_lock(L); /* unlock done in 'auxsetstr' */
+ auxsetstr(L, index2addr(L, idx), k);
+}
+
+
+LUA_API void lua_seti (lua_State *L, int idx, lua_Integer n) {
+ StkId t;
+ const TValue *slot;
+ lua_lock(L);
+ api_checknelems(L, 1);
+ t = index2addr(L, idx);
+ if (luaV_fastset(L, t, n, slot, luaH_getint, L->top - 1))
+ L->top--; /* pop value */
+ else {
+ setivalue(L->top, n);
+ api_incr_top(L);
+ luaV_finishset(L, t, L->top - 1, L->top - 2, slot);
+ L->top -= 2; /* pop value and key */
+ }
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_rawset (lua_State *L, int idx) {
+ StkId o;
+ TValue *slot;
+ lua_lock(L);
+ api_checknelems(L, 2);
+ o = index2addr(L, idx);
+ api_check(L, ttistable(o), "table expected");
+ slot = luaH_set(L, hvalue(o), L->top - 2);
+ setobj2t(L, slot, L->top - 1);
+ invalidateTMcache(hvalue(o));
+ luaC_barrierback(L, hvalue(o), L->top-1);
+ L->top -= 2;
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_rawseti (lua_State *L, int idx, lua_Integer n) {
+ StkId o;
+ lua_lock(L);
+ api_checknelems(L, 1);
+ o = index2addr(L, idx);
+ api_check(L, ttistable(o), "table expected");
+ luaH_setint(L, hvalue(o), n, L->top - 1);
+ luaC_barrierback(L, hvalue(o), L->top-1);
+ L->top--;
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_rawsetp (lua_State *L, int idx, const void *p) {
+ StkId o;
+ TValue k, *slot;
+ lua_lock(L);
+ api_checknelems(L, 1);
+ o = index2addr(L, idx);
+ api_check(L, ttistable(o), "table expected");
+ setpvalue(&k, cast(void *, p));
+ slot = luaH_set(L, hvalue(o), &k);
+ setobj2t(L, slot, L->top - 1);
+ luaC_barrierback(L, hvalue(o), L->top - 1);
+ L->top--;
+ lua_unlock(L);
+}
+
+
+LUA_API int lua_setmetatable (lua_State *L, int objindex) {
+ TValue *obj;
+ Table *mt;
+ lua_lock(L);
+ api_checknelems(L, 1);
+ obj = index2addr(L, objindex);
+ if (ttisnil(L->top - 1))
+ mt = NULL;
+ else {
+ api_check(L, ttistable(L->top - 1), "table expected");
+ mt = hvalue(L->top - 1);
+ }
+ switch (ttnov(obj)) {
+ case LUA_TTABLE: {
+ hvalue(obj)->metatable = mt;
+ if (mt) {
+ luaC_objbarrier(L, gcvalue(obj), mt);
+ luaC_checkfinalizer(L, gcvalue(obj), mt);
+ }
+ break;
+ }
+ case LUA_TUSERDATA: {
+ uvalue(obj)->metatable = mt;
+ if (mt) {
+ luaC_objbarrier(L, uvalue(obj), mt);
+ luaC_checkfinalizer(L, gcvalue(obj), mt);
+ }
+ break;
+ }
+ default: {
+ G(L)->mt[ttnov(obj)] = mt;
+ break;
+ }
+ }
+ L->top--;
+ lua_unlock(L);
+ return 1;
+}
+
+
+LUA_API void lua_setuservalue (lua_State *L, int idx) {
+ StkId o;
+ lua_lock(L);
+ api_checknelems(L, 1);
+ o = index2addr(L, idx);
+ api_check(L, ttisfulluserdata(o), "full userdata expected");
+ setuservalue(L, uvalue(o), L->top - 1);
+ luaC_barrier(L, gcvalue(o), L->top - 1);
+ L->top--;
+ lua_unlock(L);
+}
+
+
+/*
+** 'load' and 'call' functions (run Lua code)
+*/
+
+
+#define checkresults(L,na,nr) \
+ api_check(L, (nr) == LUA_MULTRET || (L->ci->top - L->top >= (nr) - (na)), \
+ "results from function overflow current stack size")
+
+
+LUA_API void lua_callk (lua_State *L, int nargs, int nresults,
+ lua_KContext ctx, lua_KFunction k) {
+ StkId func;
+ lua_lock(L);
+ api_check(L, k == NULL || !isLua(L->ci),
+ "cannot use continuations inside hooks");
+ api_checknelems(L, nargs+1);
+ api_check(L, L->status == LUA_OK, "cannot do calls on non-normal thread");
+ checkresults(L, nargs, nresults);
+ func = L->top - (nargs+1);
+ if (k != NULL && L->nny == 0) { /* need to prepare continuation? */
+ L->ci->u.c.k = k; /* save continuation */
+ L->ci->u.c.ctx = ctx; /* save context */
+ luaD_call(L, func, nresults); /* do the call */
+ }
+ else /* no continuation or no yieldable */
+ luaD_callnoyield(L, func, nresults); /* just do the call */
+ adjustresults(L, nresults);
+ lua_unlock(L);
+}
+
+
+
+/*
+** Execute a protected call.
+*/
+struct CallS { /* data to 'f_call' */
+ StkId func;
+ int nresults;
+};
+
+
+static void f_call (lua_State *L, void *ud) {
+ struct CallS *c = cast(struct CallS *, ud);
+ luaD_callnoyield(L, c->func, c->nresults);
+}
+
+
+
+LUA_API int lua_pcallk (lua_State *L, int nargs, int nresults, int errfunc,
+ lua_KContext ctx, lua_KFunction k) {
+ struct CallS c;
+ int status;
+ ptrdiff_t func;
+ lua_lock(L);
+ api_check(L, k == NULL || !isLua(L->ci),
+ "cannot use continuations inside hooks");
+ api_checknelems(L, nargs+1);
+ api_check(L, L->status == LUA_OK, "cannot do calls on non-normal thread");
+ checkresults(L, nargs, nresults);
+ if (errfunc == 0)
+ func = 0;
+ else {
+ StkId o = index2addr(L, errfunc);
+ api_checkstackindex(L, errfunc, o);
+ func = savestack(L, o);
+ }
+ c.func = L->top - (nargs+1); /* function to be called */
+ if (k == NULL || L->nny > 0) { /* no continuation or no yieldable? */
+ c.nresults = nresults; /* do a 'conventional' protected call */
+ status = luaD_pcall(L, f_call, &c, savestack(L, c.func), func);
+ }
+ else { /* prepare continuation (call is already protected by 'resume') */
+ CallInfo *ci = L->ci;
+ ci->u.c.k = k; /* save continuation */
+ ci->u.c.ctx = ctx; /* save context */
+ /* save information for error recovery */
+ ci->extra = savestack(L, c.func);
+ ci->u.c.old_errfunc = L->errfunc;
+ L->errfunc = func;
+ setoah(ci->callstatus, L->allowhook); /* save value of 'allowhook' */
+ ci->callstatus |= CIST_YPCALL; /* function can do error recovery */
+ luaD_call(L, c.func, nresults); /* do the call */
+ ci->callstatus &= ~CIST_YPCALL;
+ L->errfunc = ci->u.c.old_errfunc;
+ status = LUA_OK; /* if it is here, there were no errors */
+ }
+ adjustresults(L, nresults);
+ lua_unlock(L);
+ return status;
+}
+
+
+LUA_API int lua_load (lua_State *L, lua_Reader reader, void *data,
+ const char *chunkname, const char *mode) {
+ ZIO z;
+ int status;
+ lua_lock(L);
+ if (!chunkname) chunkname = "?";
+ luaZ_init(L, &z, reader, data);
+ status = luaD_protectedparser(L, &z, chunkname, mode);
+ if (status == LUA_OK) { /* no errors? */
+ LClosure *f = clLvalue(L->top - 1); /* get newly created function */
+ if (f->nupvalues >= 1) { /* does it have an upvalue? */
+ /* get global table from registry */
+ Table *reg = hvalue(&G(L)->l_registry);
+ const TValue *gt = luaH_getint(reg, LUA_RIDX_GLOBALS);
+ /* set global table as 1st upvalue of 'f' (may be LUA_ENV) */
+ setobj(L, f->upvals[0]->v, gt);
+ luaC_upvalbarrier(L, f->upvals[0]);
+ }
+ }
+ lua_unlock(L);
+ return status;
+}
+
+
+LUA_API int lua_dump (lua_State *L, lua_Writer writer, void *data, int strip) {
+ int status;
+ TValue *o;
+ lua_lock(L);
+ api_checknelems(L, 1);
+ o = L->top - 1;
+ if (isLfunction(o))
+ status = luaU_dump(L, getproto(o), writer, data, strip);
+ else
+ status = 1;
+ lua_unlock(L);
+ return status;
+}
+
+
+LUA_API int lua_status (lua_State *L) {
+ return L->status;
+}
+
+
+/*
+** Garbage-collection function
+*/
+
+LUA_API int lua_gc (lua_State *L, int what, int data) {
+ int res = 0;
+ global_State *g;
+ lua_lock(L);
+ g = G(L);
+ switch (what) {
+ case LUA_GCSTOP: {
+ g->gcrunning = 0;
+ break;
+ }
+ case LUA_GCRESTART: {
+ luaE_setdebt(g, 0);
+ g->gcrunning = 1;
+ break;
+ }
+ case LUA_GCCOLLECT: {
+ luaC_fullgc(L, 0);
+ break;
+ }
+ case LUA_GCCOUNT: {
+ /* GC values are expressed in Kbytes: #bytes/2^10 */
+ res = cast_int(gettotalbytes(g) >> 10);
+ break;
+ }
+ case LUA_GCCOUNTB: {
+ res = cast_int(gettotalbytes(g) & 0x3ff);
+ break;
+ }
+ case LUA_GCSTEP: {
+ l_mem debt = 1; /* =1 to signal that it did an actual step */
+ lu_byte oldrunning = g->gcrunning;
+ g->gcrunning = 1; /* allow GC to run */
+ if (data == 0) {
+ luaE_setdebt(g, -GCSTEPSIZE); /* to do a "small" step */
+ luaC_step(L);
+ }
+ else { /* add 'data' to total debt */
+ debt = cast(l_mem, data) * 1024 + g->GCdebt;
+ luaE_setdebt(g, debt);
+ luaC_checkGC(L);
+ }
+ g->gcrunning = oldrunning; /* restore previous state */
+ if (debt > 0 && g->gcstate == GCSpause) /* end of cycle? */
+ res = 1; /* signal it */
+ break;
+ }
+ case LUA_GCSETPAUSE: {
+ res = g->gcpause;
+ g->gcpause = data;
+ break;
+ }
+ case LUA_GCSETSTEPMUL: {
+ res = g->gcstepmul;
+ if (data < 40) data = 40; /* avoid ridiculous low values (and 0) */
+ g->gcstepmul = data;
+ break;
+ }
+ case LUA_GCISRUNNING: {
+ res = g->gcrunning;
+ break;
+ }
+ default: res = -1; /* invalid option */
+ }
+ lua_unlock(L);
+ return res;
+}
+
+
+
+/*
+** miscellaneous functions
+*/
+
+
+LUA_API int lua_error (lua_State *L) {
+ lua_lock(L);
+ api_checknelems(L, 1);
+ luaG_errormsg(L);
+ /* code unreachable; will unlock when control actually leaves the kernel */
+ return 0; /* to avoid warnings */
+}
+
+
+LUA_API int lua_next (lua_State *L, int idx) {
+ StkId t;
+ int more;
+ lua_lock(L);
+ t = index2addr(L, idx);
+ api_check(L, ttistable(t), "table expected");
+ more = luaH_next(L, hvalue(t), L->top - 1);
+ if (more) {
+ api_incr_top(L);
+ }
+ else /* no more elements */
+ L->top -= 1; /* remove key */
+ lua_unlock(L);
+ return more;
+}
+
+
+LUA_API void lua_concat (lua_State *L, int n) {
+ lua_lock(L);
+ api_checknelems(L, n);
+ if (n >= 2) {
+ luaV_concat(L, n);
+ }
+ else if (n == 0) { /* push empty string */
+ setsvalue2s(L, L->top, luaS_newlstr(L, "", 0));
+ api_incr_top(L);
+ }
+ /* else n == 1; nothing to do */
+ luaC_checkGC(L);
+ lua_unlock(L);
+}
+
+
+LUA_API void lua_len (lua_State *L, int idx) {
+ StkId t;
+ lua_lock(L);
+ t = index2addr(L, idx);
+ luaV_objlen(L, L->top, t);
+ api_incr_top(L);
+ lua_unlock(L);
+}
+
+
+LUA_API lua_Alloc lua_getallocf (lua_State *L, void **ud) {
+ lua_Alloc f;
+ lua_lock(L);
+ if (ud) *ud = G(L)->ud;
+ f = G(L)->frealloc;
+ lua_unlock(L);
+ return f;
+}
+
+
+LUA_API void lua_setallocf (lua_State *L, lua_Alloc f, void *ud) {
+ lua_lock(L);
+ G(L)->ud = ud;
+ G(L)->frealloc = f;
+ lua_unlock(L);
+}
+
+
+LUA_API void *lua_newuserdata (lua_State *L, size_t size) {
+ Udata *u;
+ lua_lock(L);
+ u = luaS_newudata(L, size);
+ setuvalue(L, L->top, u);
+ api_incr_top(L);
+ luaC_checkGC(L);
+ lua_unlock(L);
+ return getudatamem(u);
+}
+
+
+
+static const char *aux_upvalue (StkId fi, int n, TValue **val,
+ CClosure **owner, UpVal **uv) {
+ switch (ttype(fi)) {
+ case LUA_TCCL: { /* C closure */
+ CClosure *f = clCvalue(fi);
+ if (!(1 <= n && n <= f->nupvalues)) return NULL;
+ *val = &f->upvalue[n-1];
+ if (owner) *owner = f;
+ return "";
+ }
+ case LUA_TLCL: { /* Lua closure */
+ LClosure *f = clLvalue(fi);
+ TString *name;
+ Proto *p = f->p;
+ if (!(1 <= n && n <= p->sizeupvalues)) return NULL;
+ *val = f->upvals[n-1]->v;
+ if (uv) *uv = f->upvals[n - 1];
+ name = p->upvalues[n-1].name;
+ return (name == NULL) ? "(*no name)" : getstr(name);
+ }
+ default: return NULL; /* not a closure */
+ }
+}
+
+
+LUA_API const char *lua_getupvalue (lua_State *L, int funcindex, int n) {
+ const char *name;
+ TValue *val = NULL; /* to avoid warnings */
+ lua_lock(L);
+ name = aux_upvalue(index2addr(L, funcindex), n, &val, NULL, NULL);
+ if (name) {
+ setobj2s(L, L->top, val);
+ api_incr_top(L);
+ }
+ lua_unlock(L);
+ return name;
+}
+
+
+LUA_API const char *lua_setupvalue (lua_State *L, int funcindex, int n) {
+ const char *name;
+ TValue *val = NULL; /* to avoid warnings */
+ CClosure *owner = NULL;
+ UpVal *uv = NULL;
+ StkId fi;
+ lua_lock(L);
+ fi = index2addr(L, funcindex);
+ api_checknelems(L, 1);
+ name = aux_upvalue(fi, n, &val, &owner, &uv);
+ if (name) {
+ L->top--;
+ setobj(L, val, L->top);
+ if (owner) { luaC_barrier(L, owner, L->top); }
+ else if (uv) { luaC_upvalbarrier(L, uv); }
+ }
+ lua_unlock(L);
+ return name;
+}
+
+
+static UpVal **getupvalref (lua_State *L, int fidx, int n, LClosure **pf) {
+ LClosure *f;
+ StkId fi = index2addr(L, fidx);
+ api_check(L, ttisLclosure(fi), "Lua function expected");
+ f = clLvalue(fi);
+ api_check(L, (1 <= n && n <= f->p->sizeupvalues), "invalid upvalue index");
+ if (pf) *pf = f;
+ return &f->upvals[n - 1]; /* get its upvalue pointer */
+}
+
+
+LUA_API void *lua_upvalueid (lua_State *L, int fidx, int n) {
+ StkId fi = index2addr(L, fidx);
+ switch (ttype(fi)) {
+ case LUA_TLCL: { /* lua closure */
+ return *getupvalref(L, fidx, n, NULL);
+ }
+ case LUA_TCCL: { /* C closure */
+ CClosure *f = clCvalue(fi);
+ api_check(L, 1 <= n && n <= f->nupvalues, "invalid upvalue index");
+ return &f->upvalue[n - 1];
+ }
+ default: {
+ api_check(L, 0, "closure expected");
+ return NULL;
+ }
+ }
+}
+
+
+LUA_API void lua_upvaluejoin (lua_State *L, int fidx1, int n1,
+ int fidx2, int n2) {
+ LClosure *f1;
+ UpVal **up1 = getupvalref(L, fidx1, n1, &f1);
+ UpVal **up2 = getupvalref(L, fidx2, n2, NULL);
+ luaC_upvdeccount(L, *up1);
+ *up1 = *up2;
+ (*up1)->refcount++;
+ if (upisopen(*up1)) (*up1)->u.open.touched = 1;
+ luaC_upvalbarrier(L, *up1);
+}
+
+
diff --git a/lua/src/lapi.h b/lua/src/lapi.h
new file mode 100644
index 000000000..6d36dee3f
--- /dev/null
+++ b/lua/src/lapi.h
@@ -0,0 +1,24 @@
+/*
+** $Id: lapi.h,v 2.9 2015/03/06 19:49:50 roberto Exp $
+** Auxiliary functions from Lua API
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lapi_h
+#define lapi_h
+
+
+#include "llimits.h"
+#include "lstate.h"
+
+#define api_incr_top(L) {L->top++; api_check(L, L->top <= L->ci->top, \
+ "stack overflow");}
+
+#define adjustresults(L,nres) \
+ { if ((nres) == LUA_MULTRET && L->ci->top < L->top) L->ci->top = L->top; }
+
+#define api_checknelems(L,n) api_check(L, (n) < (L->top - L->ci->func), \
+ "not enough elements in the stack")
+
+
+#endif
diff --git a/lua/src/lauxlib.c b/lua/src/lauxlib.c
new file mode 100644
index 000000000..f7a383663
--- /dev/null
+++ b/lua/src/lauxlib.c
@@ -0,0 +1,1043 @@
+/*
+** $Id: lauxlib.c,v 1.289 2016/12/20 18:37:00 roberto Exp $
+** Auxiliary functions for building Lua libraries
+** See Copyright Notice in lua.h
+*/
+
+#define lauxlib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <errno.h>
+#include <stdarg.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+
+/*
+** This file uses only the official API of Lua.
+** Any function declared here could be written as an application function.
+*/
+
+#include "lua.h"
+
+#include "lauxlib.h"
+
+
+/*
+** {======================================================
+** Traceback
+** =======================================================
+*/
+
+
+#define LEVELS1 10 /* size of the first part of the stack */
+#define LEVELS2 11 /* size of the second part of the stack */
+
+
+
+/*
+** search for 'objidx' in table at index -1.
+** return 1 + string at top if find a good name.
+*/
+static int findfield (lua_State *L, int objidx, int level) {
+ if (level == 0 || !lua_istable(L, -1))
+ return 0; /* not found */
+ lua_pushnil(L); /* start 'next' loop */
+ while (lua_next(L, -2)) { /* for each pair in table */
+ if (lua_type(L, -2) == LUA_TSTRING) { /* ignore non-string keys */
+ if (lua_rawequal(L, objidx, -1)) { /* found object? */
+ lua_pop(L, 1); /* remove value (but keep name) */
+ return 1;
+ }
+ else if (findfield(L, objidx, level - 1)) { /* try recursively */
+ lua_remove(L, -2); /* remove table (but keep name) */
+ lua_pushliteral(L, ".");
+ lua_insert(L, -2); /* place '.' between the two names */
+ lua_concat(L, 3);
+ return 1;
+ }
+ }
+ lua_pop(L, 1); /* remove value */
+ }
+ return 0; /* not found */
+}
+
+
+/*
+** Search for a name for a function in all loaded modules
+*/
+static int pushglobalfuncname (lua_State *L, lua_Debug *ar) {
+ int top = lua_gettop(L);
+ lua_getinfo(L, "f", ar); /* push function */
+ lua_getfield(L, LUA_REGISTRYINDEX, LUA_LOADED_TABLE);
+ if (findfield(L, top + 1, 2)) {
+ const char *name = lua_tostring(L, -1);
+ if (strncmp(name, "_G.", 3) == 0) { /* name start with '_G.'? */
+ lua_pushstring(L, name + 3); /* push name without prefix */
+ lua_remove(L, -2); /* remove original name */
+ }
+ lua_copy(L, -1, top + 1); /* move name to proper place */
+ lua_pop(L, 2); /* remove pushed values */
+ return 1;
+ }
+ else {
+ lua_settop(L, top); /* remove function and global table */
+ return 0;
+ }
+}
+
+
+static void pushfuncname (lua_State *L, lua_Debug *ar) {
+ if (pushglobalfuncname(L, ar)) { /* try first a global name */
+ lua_pushfstring(L, "function '%s'", lua_tostring(L, -1));
+ lua_remove(L, -2); /* remove name */
+ }
+ else if (*ar->namewhat != '\0') /* is there a name from code? */
+ lua_pushfstring(L, "%s '%s'", ar->namewhat, ar->name); /* use it */
+ else if (*ar->what == 'm') /* main? */
+ lua_pushliteral(L, "main chunk");
+ else if (*ar->what != 'C') /* for Lua functions, use <file:line> */
+ lua_pushfstring(L, "function <%s:%d>", ar->short_src, ar->linedefined);
+ else /* nothing left... */
+ lua_pushliteral(L, "?");
+}
+
+
+static int lastlevel (lua_State *L) {
+ lua_Debug ar;
+ int li = 1, le = 1;
+ /* find an upper bound */
+ while (lua_getstack(L, le, &ar)) { li = le; le *= 2; }
+ /* do a binary search */
+ while (li < le) {
+ int m = (li + le)/2;
+ if (lua_getstack(L, m, &ar)) li = m + 1;
+ else le = m;
+ }
+ return le - 1;
+}
+
+
+LUALIB_API void luaL_traceback (lua_State *L, lua_State *L1,
+ const char *msg, int level) {
+ lua_Debug ar;
+ int top = lua_gettop(L);
+ int last = lastlevel(L1);
+ int n1 = (last - level > LEVELS1 + LEVELS2) ? LEVELS1 : -1;
+ if (msg)
+ lua_pushfstring(L, "%s\n", msg);
+ luaL_checkstack(L, 10, NULL);
+ lua_pushliteral(L, "stack traceback:");
+ while (lua_getstack(L1, level++, &ar)) {
+ if (n1-- == 0) { /* too many levels? */
+ lua_pushliteral(L, "\n\t..."); /* add a '...' */
+ level = last - LEVELS2 + 1; /* and skip to last ones */
+ }
+ else {
+ lua_getinfo(L1, "Slnt", &ar);
+ lua_pushfstring(L, "\n\t%s:", ar.short_src);
+ if (ar.currentline > 0)
+ lua_pushfstring(L, "%d:", ar.currentline);
+ lua_pushliteral(L, " in ");
+ pushfuncname(L, &ar);
+ if (ar.istailcall)
+ lua_pushliteral(L, "\n\t(...tail calls...)");
+ lua_concat(L, lua_gettop(L) - top);
+ }
+ }
+ lua_concat(L, lua_gettop(L) - top);
+}
+
+/* }====================================================== */
+
+
+/*
+** {======================================================
+** Error-report functions
+** =======================================================
+*/
+
+LUALIB_API int luaL_argerror (lua_State *L, int arg, const char *extramsg) {
+ lua_Debug ar;
+ if (!lua_getstack(L, 0, &ar)) /* no stack frame? */
+ return luaL_error(L, "bad argument #%d (%s)", arg, extramsg);
+ lua_getinfo(L, "n", &ar);
+ if (strcmp(ar.namewhat, "method") == 0) {
+ arg--; /* do not count 'self' */
+ if (arg == 0) /* error is in the self argument itself? */
+ return luaL_error(L, "calling '%s' on bad self (%s)",
+ ar.name, extramsg);
+ }
+ if (ar.name == NULL)
+ ar.name = (pushglobalfuncname(L, &ar)) ? lua_tostring(L, -1) : "?";
+ return luaL_error(L, "bad argument #%d to '%s' (%s)",
+ arg, ar.name, extramsg);
+}
+
+
+static int typeerror (lua_State *L, int arg, const char *tname) {
+ const char *msg;
+ const char *typearg; /* name for the type of the actual argument */
+ if (luaL_getmetafield(L, arg, "__name") == LUA_TSTRING)
+ typearg = lua_tostring(L, -1); /* use the given type name */
+ else if (lua_type(L, arg) == LUA_TLIGHTUSERDATA)
+ typearg = "light userdata"; /* special name for messages */
+ else
+ typearg = luaL_typename(L, arg); /* standard name */
+ msg = lua_pushfstring(L, "%s expected, got %s", tname, typearg);
+ return luaL_argerror(L, arg, msg);
+}
+
+
+static void tag_error (lua_State *L, int arg, int tag) {
+ typeerror(L, arg, lua_typename(L, tag));
+}
+
+
+/*
+** The use of 'lua_pushfstring' ensures this function does not
+** need reserved stack space when called.
+*/
+LUALIB_API void luaL_where (lua_State *L, int level) {
+ lua_Debug ar;
+ if (lua_getstack(L, level, &ar)) { /* check function at level */
+ lua_getinfo(L, "Sl", &ar); /* get info about it */
+ if (ar.currentline > 0) { /* is there info? */
+ lua_pushfstring(L, "%s:%d: ", ar.short_src, ar.currentline);
+ return;
+ }
+ }
+ lua_pushfstring(L, ""); /* else, no information available... */
+}
+
+
+/*
+** Again, the use of 'lua_pushvfstring' ensures this function does
+** not need reserved stack space when called. (At worst, it generates
+** an error with "stack overflow" instead of the given message.)
+*/
+LUALIB_API int luaL_error (lua_State *L, const char *fmt, ...) {
+ va_list argp;
+ va_start(argp, fmt);
+ luaL_where(L, 1);
+ lua_pushvfstring(L, fmt, argp);
+ va_end(argp);
+ lua_concat(L, 2);
+ return lua_error(L);
+}
+
+
+LUALIB_API int luaL_fileresult (lua_State *L, int stat, const char *fname) {
+ int en = errno; /* calls to Lua API may change this value */
+ if (stat) {
+ lua_pushboolean(L, 1);
+ return 1;
+ }
+ else {
+ lua_pushnil(L);
+ if (fname)
+ lua_pushfstring(L, "%s: %s", fname, strerror(en));
+ else
+ lua_pushstring(L, strerror(en));
+ lua_pushinteger(L, en);
+ return 3;
+ }
+}
+
+
+#if !defined(l_inspectstat) /* { */
+
+#if defined(LUA_USE_POSIX)
+
+#include <sys/wait.h>
+
+/*
+** use appropriate macros to interpret 'pclose' return status
+*/
+#define l_inspectstat(stat,what) \
+ if (WIFEXITED(stat)) { stat = WEXITSTATUS(stat); } \
+ else if (WIFSIGNALED(stat)) { stat = WTERMSIG(stat); what = "signal"; }
+
+#else
+
+#define l_inspectstat(stat,what) /* no op */
+
+#endif
+
+#endif /* } */
+
+
+LUALIB_API int luaL_execresult (lua_State *L, int stat) {
+ const char *what = "exit"; /* type of termination */
+ if (stat == -1) /* error? */
+ return luaL_fileresult(L, 0, NULL);
+ else {
+ l_inspectstat(stat, what); /* interpret result */
+ if (*what == 'e' && stat == 0) /* successful termination? */
+ lua_pushboolean(L, 1);
+ else
+ lua_pushnil(L);
+ lua_pushstring(L, what);
+ lua_pushinteger(L, stat);
+ return 3; /* return true/nil,what,code */
+ }
+}
+
+/* }====================================================== */
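+
+/*
+** Usage sketch (illustration only, not part of the upstream Lua sources):
+** library C functions typically report bad arguments and other errors
+** through the helpers above.  The function 'l_clamp' and its argument
+** layout are hypothetical.
+**
+**   static int l_clamp (lua_State *L) {
+**     lua_Integer v  = luaL_checkinteger(L, 1);
+**     lua_Integer lo = luaL_checkinteger(L, 2);
+**     lua_Integer hi = luaL_checkinteger(L, 3);
+**     if (lo > hi)
+**       return luaL_argerror(L, 3, "max is smaller than min");
+**     lua_pushinteger(L, v < lo ? lo : (v > hi ? hi : v));
+**     return 1;
+**   }
+*/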
+
+
+/*
+** {======================================================
+** Userdata's metatable manipulation
+** =======================================================
+*/
+
+LUALIB_API int luaL_newmetatable (lua_State *L, const char *tname) {
+ if (luaL_getmetatable(L, tname) != LUA_TNIL) /* name already in use? */
+ return 0; /* leave previous value on top, but return 0 */
+ lua_pop(L, 1);
+ lua_createtable(L, 0, 2); /* create metatable */
+ lua_pushstring(L, tname);
+ lua_setfield(L, -2, "__name"); /* metatable.__name = tname */
+ lua_pushvalue(L, -1);
+ lua_setfield(L, LUA_REGISTRYINDEX, tname); /* registry.name = metatable */
+ return 1;
+}
+
+
+LUALIB_API void luaL_setmetatable (lua_State *L, const char *tname) {
+ luaL_getmetatable(L, tname);
+ lua_setmetatable(L, -2);
+}
+
+
+LUALIB_API void *luaL_testudata (lua_State *L, int ud, const char *tname) {
+ void *p = lua_touserdata(L, ud);
+ if (p != NULL) { /* value is a userdata? */
+ if (lua_getmetatable(L, ud)) { /* does it have a metatable? */
+ luaL_getmetatable(L, tname); /* get correct metatable */
+ if (!lua_rawequal(L, -1, -2)) /* not the same? */
+ p = NULL; /* value is a userdata with wrong metatable */
+ lua_pop(L, 2); /* remove both metatables */
+ return p;
+ }
+ }
+ return NULL; /* value is not a userdata with a metatable */
+}
+
+
+LUALIB_API void *luaL_checkudata (lua_State *L, int ud, const char *tname) {
+ void *p = luaL_testudata(L, ud, tname);
+ if (p == NULL) typeerror(L, ud, tname);
+ return p;
+}
+
+/* }====================================================== */
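+
+/*
+** Usage sketch (illustration only, not part of the upstream Lua sources):
+** the usual pattern for a C-defined object type.  The type name
+** "my.point", the Point struct, and both functions are hypothetical; the
+** metatable is assumed to have been created once with luaL_newmetatable
+** (for example, in the library's luaopen_ function).
+**
+**   typedef struct { double x, y; } Point;
+**
+**   static int newpoint (lua_State *L) {
+**     Point *p = (Point *)lua_newuserdata(L, sizeof(Point));
+**     p->x = luaL_optnumber(L, 1, 0);
+**     p->y = luaL_optnumber(L, 2, 0);
+**     luaL_setmetatable(L, "my.point");  // tag the new userdata
+**     return 1;
+**   }
+**
+**   static int getx (lua_State *L) {
+**     Point *p = (Point *)luaL_checkudata(L, 1, "my.point");
+**     lua_pushnumber(L, p->x);
+**     return 1;
+**   }
+*/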
+
+
+/*
+** {======================================================
+** Argument check functions
+** =======================================================
+*/
+
+LUALIB_API int luaL_checkoption (lua_State *L, int arg, const char *def,
+ const char *const lst[]) {
+ const char *name = (def) ? luaL_optstring(L, arg, def) :
+ luaL_checkstring(L, arg);
+ int i;
+ for (i=0; lst[i]; i++)
+ if (strcmp(lst[i], name) == 0)
+ return i;
+ return luaL_argerror(L, arg,
+ lua_pushfstring(L, "invalid option '%s'", name));
+}
+
+
+/*
+** Ensures the stack has at least 'space' extra slots, raising an error
+** if it cannot fulfill the request. (The error handling needs a few
+** extra slots to format the error message. In case of an error without
+** this extra space, Lua will generate the same 'stack overflow' error,
+** but without 'msg'.)
+*/
+LUALIB_API void luaL_checkstack (lua_State *L, int space, const char *msg) {
+ if (!lua_checkstack(L, space)) {
+ if (msg)
+ luaL_error(L, "stack overflow (%s)", msg);
+ else
+ luaL_error(L, "stack overflow");
+ }
+}
+
+
+LUALIB_API void luaL_checktype (lua_State *L, int arg, int t) {
+ if (lua_type(L, arg) != t)
+ tag_error(L, arg, t);
+}
+
+
+LUALIB_API void luaL_checkany (lua_State *L, int arg) {
+ if (lua_type(L, arg) == LUA_TNONE)
+ luaL_argerror(L, arg, "value expected");
+}
+
+
+LUALIB_API const char *luaL_checklstring (lua_State *L, int arg, size_t *len) {
+ const char *s = lua_tolstring(L, arg, len);
+ if (!s) tag_error(L, arg, LUA_TSTRING);
+ return s;
+}
+
+
+LUALIB_API const char *luaL_optlstring (lua_State *L, int arg,
+ const char *def, size_t *len) {
+ if (lua_isnoneornil(L, arg)) {
+ if (len)
+ *len = (def ? strlen(def) : 0);
+ return def;
+ }
+ else return luaL_checklstring(L, arg, len);
+}
+
+
+LUALIB_API lua_Number luaL_checknumber (lua_State *L, int arg) {
+ int isnum;
+ lua_Number d = lua_tonumberx(L, arg, &isnum);
+ if (!isnum)
+ tag_error(L, arg, LUA_TNUMBER);
+ return d;
+}
+
+
+LUALIB_API lua_Number luaL_optnumber (lua_State *L, int arg, lua_Number def) {
+ return luaL_opt(L, luaL_checknumber, arg, def);
+}
+
+
+static void interror (lua_State *L, int arg) {
+ if (lua_isnumber(L, arg))
+ luaL_argerror(L, arg, "number has no integer representation");
+ else
+ tag_error(L, arg, LUA_TNUMBER);
+}
+
+
+LUALIB_API lua_Integer luaL_checkinteger (lua_State *L, int arg) {
+ int isnum;
+ lua_Integer d = lua_tointegerx(L, arg, &isnum);
+ if (!isnum) {
+ interror(L, arg);
+ }
+ return d;
+}
+
+
+LUALIB_API lua_Integer luaL_optinteger (lua_State *L, int arg,
+ lua_Integer def) {
+ return luaL_opt(L, luaL_checkinteger, arg, def);
+}
+
+/* }====================================================== */
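+
+/*
+** Usage sketch (illustration only, not part of the upstream Lua sources):
+** 'luaL_checkoption' maps a string argument onto an index into a
+** NULL-terminated list, and 'luaL_optinteger' supplies a default for a
+** missing argument.  The option list and function below are made up.
+**
+**   static const char *const modes[] = {"read", "write", "append", NULL};
+**
+**   static int setmode (lua_State *L) {
+**     int mode = luaL_checkoption(L, 1, "read", modes);
+**     lua_Integer bufsize = luaL_optinteger(L, 2, 512);
+**     lua_pushinteger(L, mode);     // index of the chosen option
+**     lua_pushinteger(L, bufsize);
+**     return 2;
+**   }
+*/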
+
+
+/*
+** {======================================================
+** Generic Buffer manipulation
+** =======================================================
+*/
+
+/* userdata to box arbitrary data */
+typedef struct UBox {
+ void *box;
+ size_t bsize;
+} UBox;
+
+
+static void *resizebox (lua_State *L, int idx, size_t newsize) {
+ void *ud;
+ lua_Alloc allocf = lua_getallocf(L, &ud);
+ UBox *box = (UBox *)lua_touserdata(L, idx);
+ void *temp = allocf(ud, box->box, box->bsize, newsize);
+ if (temp == NULL && newsize > 0) { /* allocation error? */
+ resizebox(L, idx, 0); /* free buffer */
+ luaL_error(L, "not enough memory for buffer allocation");
+ }
+ box->box = temp;
+ box->bsize = newsize;
+ return temp;
+}
+
+
+static int boxgc (lua_State *L) {
+ resizebox(L, 1, 0);
+ return 0;
+}
+
+
+static void *newbox (lua_State *L, size_t newsize) {
+ UBox *box = (UBox *)lua_newuserdata(L, sizeof(UBox));
+ box->box = NULL;
+ box->bsize = 0;
+ if (luaL_newmetatable(L, "LUABOX")) { /* creating metatable? */
+ lua_pushcfunction(L, boxgc);
+ lua_setfield(L, -2, "__gc"); /* metatable.__gc = boxgc */
+ }
+ lua_setmetatable(L, -2);
+ return resizebox(L, -1, newsize);
+}
+
+
+/*
+** check whether buffer is using a userdata on the stack as a temporary
+** buffer
+*/
+#define buffonstack(B) ((B)->b != (B)->initb)
+
+
+/*
+** returns a pointer to a free area with at least 'sz' bytes
+*/
+LUALIB_API char *luaL_prepbuffsize (luaL_Buffer *B, size_t sz) {
+ lua_State *L = B->L;
+ if (B->size - B->n < sz) { /* not enough space? */
+ char *newbuff;
+ size_t newsize = B->size * 2; /* double buffer size */
+ if (newsize - B->n < sz) /* not big enough? */
+ newsize = B->n + sz;
+ if (newsize < B->n || newsize - B->n < sz)
+ luaL_error(L, "buffer too large");
+ /* create larger buffer */
+ if (buffonstack(B))
+ newbuff = (char *)resizebox(L, -1, newsize);
+ else { /* no buffer yet */
+ newbuff = (char *)newbox(L, newsize);
+ memcpy(newbuff, B->b, B->n * sizeof(char)); /* copy original content */
+ }
+ B->b = newbuff;
+ B->size = newsize;
+ }
+ return &B->b[B->n];
+}
+
+
+LUALIB_API void luaL_addlstring (luaL_Buffer *B, const char *s, size_t l) {
+ if (l > 0) { /* avoid 'memcpy' when 's' can be NULL */
+ char *b = luaL_prepbuffsize(B, l);
+ memcpy(b, s, l * sizeof(char));
+ luaL_addsize(B, l);
+ }
+}
+
+
+LUALIB_API void luaL_addstring (luaL_Buffer *B, const char *s) {
+ luaL_addlstring(B, s, strlen(s));
+}
+
+
+LUALIB_API void luaL_pushresult (luaL_Buffer *B) {
+ lua_State *L = B->L;
+ lua_pushlstring(L, B->b, B->n);
+ if (buffonstack(B)) {
+ resizebox(L, -2, 0); /* delete old buffer */
+ lua_remove(L, -2); /* remove its header from the stack */
+ }
+}
+
+
+LUALIB_API void luaL_pushresultsize (luaL_Buffer *B, size_t sz) {
+ luaL_addsize(B, sz);
+ luaL_pushresult(B);
+}
+
+
+LUALIB_API void luaL_addvalue (luaL_Buffer *B) {
+ lua_State *L = B->L;
+ size_t l;
+ const char *s = lua_tolstring(L, -1, &l);
+ if (buffonstack(B))
+ lua_insert(L, -2); /* put value below buffer */
+ luaL_addlstring(B, s, l);
+ lua_remove(L, (buffonstack(B)) ? -2 : -1); /* remove value */
+}
+
+
+LUALIB_API void luaL_buffinit (lua_State *L, luaL_Buffer *B) {
+ B->L = L;
+ B->b = B->initb;
+ B->n = 0;
+ B->size = LUAL_BUFFERSIZE;
+}
+
+
+LUALIB_API char *luaL_buffinitsize (lua_State *L, luaL_Buffer *B, size_t sz) {
+ luaL_buffinit(L, B);
+ return luaL_prepbuffsize(B, sz);
+}
+
+/* }====================================================== */
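+
+/*
+** Usage sketch (illustration only, not part of the upstream Lua sources):
+** building a string incrementally with a luaL_Buffer.  The function
+** 'l_join' (concatenate all string arguments with commas) is hypothetical.
+**
+**   static int l_join (lua_State *L) {
+**     int i, n = lua_gettop(L);
+**     luaL_Buffer b;
+**     luaL_buffinit(L, &b);
+**     for (i = 1; i <= n; i++) {
+**       if (i > 1) luaL_addchar(&b, ',');
+**       luaL_addstring(&b, luaL_checkstring(L, i));
+**     }
+**     luaL_pushresult(&b);  // pushes the final string
+**     return 1;
+**   }
+*/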
+
+
+/*
+** {======================================================
+** Reference system
+** =======================================================
+*/
+
+/* index of free-list header */
+#define freelist 0
+
+
+LUALIB_API int luaL_ref (lua_State *L, int t) {
+ int ref;
+ if (lua_isnil(L, -1)) {
+ lua_pop(L, 1); /* remove from stack */
+ return LUA_REFNIL; /* 'nil' has a unique fixed reference */
+ }
+ t = lua_absindex(L, t);
+ lua_rawgeti(L, t, freelist); /* get first free element */
+ ref = (int)lua_tointeger(L, -1); /* ref = t[freelist] */
+ lua_pop(L, 1); /* remove it from stack */
+ if (ref != 0) { /* any free element? */
+ lua_rawgeti(L, t, ref); /* remove it from list */
+ lua_rawseti(L, t, freelist); /* (t[freelist] = t[ref]) */
+ }
+ else /* no free elements */
+ ref = (int)lua_rawlen(L, t) + 1; /* get a new reference */
+ lua_rawseti(L, t, ref);
+ return ref;
+}
+
+
+LUALIB_API void luaL_unref (lua_State *L, int t, int ref) {
+ if (ref >= 0) {
+ t = lua_absindex(L, t);
+ lua_rawgeti(L, t, freelist);
+ lua_rawseti(L, t, ref); /* t[ref] = t[freelist] */
+ lua_pushinteger(L, ref);
+ lua_rawseti(L, t, freelist); /* t[freelist] = ref */
+ }
+}
+
+/* }====================================================== */
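+
+/*
+** Usage sketch (illustration only, not part of the upstream Lua sources):
+** anchoring a Lua value (here a callback passed as argument 1) in the
+** registry so that C code can find it later.  The variable 'cb_ref' and
+** both functions are hypothetical.
+**
+**   static int cb_ref = LUA_NOREF;
+**
+**   static int setcallback (lua_State *L) {
+**     luaL_checktype(L, 1, LUA_TFUNCTION);
+**     luaL_unref(L, LUA_REGISTRYINDEX, cb_ref);  // no-op for LUA_NOREF
+**     lua_pushvalue(L, 1);
+**     cb_ref = luaL_ref(L, LUA_REGISTRYINDEX);   // pops the copy
+**     return 0;
+**   }
+**
+**   static void firecallback (lua_State *L) {
+**     lua_rawgeti(L, LUA_REGISTRYINDEX, cb_ref);
+**     lua_call(L, 0, 0);
+**   }
+*/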
+
+
+/*
+** {======================================================
+** Load functions
+** =======================================================
+*/
+
+typedef struct LoadF {
+ int n; /* number of pre-read characters */
+ FILE *f; /* file being read */
+ char buff[BUFSIZ]; /* area for reading file */
+} LoadF;
+
+
+static const char *getF (lua_State *L, void *ud, size_t *size) {
+ LoadF *lf = (LoadF *)ud;
+ (void)L; /* not used */
+ if (lf->n > 0) { /* are there pre-read characters to be read? */
+ *size = lf->n; /* return them (chars already in buffer) */
+ lf->n = 0; /* no more pre-read characters */
+ }
+ else { /* read a block from file */
+ /* 'fread' can return > 0 *and* set the EOF flag. If next call to
+ 'getF' called 'fread', it might still wait for user input.
+ The next check avoids this problem. */
+ if (feof(lf->f)) return NULL;
+ *size = fread(lf->buff, 1, sizeof(lf->buff), lf->f); /* read block */
+ }
+ return lf->buff;
+}
+
+
+static int errfile (lua_State *L, const char *what, int fnameindex) {
+ const char *serr = strerror(errno);
+ const char *filename = lua_tostring(L, fnameindex) + 1;
+ lua_pushfstring(L, "cannot %s %s: %s", what, filename, serr);
+ lua_remove(L, fnameindex);
+ return LUA_ERRFILE;
+}
+
+
+static int skipBOM (LoadF *lf) {
+ const char *p = "\xEF\xBB\xBF"; /* UTF-8 BOM mark */
+ int c;
+ lf->n = 0;
+ do {
+ c = getc(lf->f);
+ if (c == EOF || c != *(const unsigned char *)p++) return c;
+ lf->buff[lf->n++] = c; /* to be read by the parser */
+ } while (*p != '\0');
+ lf->n = 0; /* prefix matched; discard it */
+ return getc(lf->f); /* return next character */
+}
+
+
+/*
+** reads the first character of file 'f' and skips an optional BOM mark
+** in its beginning plus its first line if it starts with '#'. Returns
+** true if it skipped the first line. In any case, '*cp' has the
+** first "valid" character of the file (after the optional BOM and
+** a first-line comment).
+*/
+static int skipcomment (LoadF *lf, int *cp) {
+ int c = *cp = skipBOM(lf);
+ if (c == '#') { /* first line is a comment (Unix exec. file)? */
+ do { /* skip first line */
+ c = getc(lf->f);
+ } while (c != EOF && c != '\n');
+ *cp = getc(lf->f); /* skip end-of-line, if present */
+ return 1; /* there was a comment */
+ }
+ else return 0; /* no comment */
+}
+
+
+LUALIB_API int luaL_loadfilex (lua_State *L, const char *filename,
+ const char *mode) {
+ LoadF lf;
+ int status, readstatus;
+ int c;
+ int fnameindex = lua_gettop(L) + 1; /* index of filename on the stack */
+ if (filename == NULL) {
+ lua_pushliteral(L, "=stdin");
+ lf.f = stdin;
+ }
+ else {
+ lua_pushfstring(L, "@%s", filename);
+ lf.f = fopen(filename, "r");
+ if (lf.f == NULL) return errfile(L, "open", fnameindex);
+ }
+ if (skipcomment(&lf, &c)) /* read initial portion */
+ lf.buff[lf.n++] = '\n'; /* add line to correct line numbers */
+ if (c == LUA_SIGNATURE[0] && filename) { /* binary file? */
+ lf.f = freopen(filename, "rb", lf.f); /* reopen in binary mode */
+ if (lf.f == NULL) return errfile(L, "reopen", fnameindex);
+ skipcomment(&lf, &c); /* re-read initial portion */
+ }
+ if (c != EOF)
+ lf.buff[lf.n++] = c; /* 'c' is the first character of the stream */
+ status = lua_load(L, getF, &lf, lua_tostring(L, -1), mode);
+ readstatus = ferror(lf.f);
+ if (filename) fclose(lf.f); /* close file (even in case of errors) */
+ if (readstatus) {
+ lua_settop(L, fnameindex); /* ignore results from 'lua_load' */
+ return errfile(L, "read", fnameindex);
+ }
+ lua_remove(L, fnameindex);
+ return status;
+}
+
+
+typedef struct LoadS {
+ const char *s;
+ size_t size;
+} LoadS;
+
+
+static const char *getS (lua_State *L, void *ud, size_t *size) {
+ LoadS *ls = (LoadS *)ud;
+ (void)L; /* not used */
+ if (ls->size == 0) return NULL;
+ *size = ls->size;
+ ls->size = 0;
+ return ls->s;
+}
+
+
+LUALIB_API int luaL_loadbufferx (lua_State *L, const char *buff, size_t size,
+ const char *name, const char *mode) {
+ LoadS ls;
+ ls.s = buff;
+ ls.size = size;
+ return lua_load(L, getS, &ls, name, mode);
+}
+
+
+LUALIB_API int luaL_loadstring (lua_State *L, const char *s) {
+ return luaL_loadbuffer(L, s, strlen(s), s);
+}
+
+/* }====================================================== */
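+
+/*
+** Usage sketch (illustration only, not part of the upstream Lua sources):
+** compiling and running a chunk from C.  On failure both 'luaL_loadstring'
+** and 'lua_pcall' leave an error message on the stack; 'runchunk' is a
+** hypothetical helper.
+**
+**   static void runchunk (lua_State *L, const char *code) {
+**     if (luaL_loadstring(L, code) != LUA_OK ||
+**         lua_pcall(L, 0, 0, 0) != LUA_OK) {
+**       fprintf(stderr, "lua: %s\n", lua_tostring(L, -1));
+**       lua_pop(L, 1);  // pop the error message
+**     }
+**   }
+*/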
+
+
+
+LUALIB_API int luaL_getmetafield (lua_State *L, int obj, const char *event) {
+ if (!lua_getmetatable(L, obj)) /* no metatable? */
+ return LUA_TNIL;
+ else {
+ int tt;
+ lua_pushstring(L, event);
+ tt = lua_rawget(L, -2);
+ if (tt == LUA_TNIL) /* is metafield nil? */
+ lua_pop(L, 2); /* remove metatable and metafield */
+ else
+ lua_remove(L, -2); /* remove only metatable */
+ return tt; /* return metafield type */
+ }
+}
+
+
+LUALIB_API int luaL_callmeta (lua_State *L, int obj, const char *event) {
+ obj = lua_absindex(L, obj);
+ if (luaL_getmetafield(L, obj, event) == LUA_TNIL) /* no metafield? */
+ return 0;
+ lua_pushvalue(L, obj);
+ lua_call(L, 1, 1);
+ return 1;
+}
+
+
+LUALIB_API lua_Integer luaL_len (lua_State *L, int idx) {
+ lua_Integer l;
+ int isnum;
+ lua_len(L, idx);
+ l = lua_tointegerx(L, -1, &isnum);
+ if (!isnum)
+ luaL_error(L, "object length is not an integer");
+ lua_pop(L, 1); /* remove object */
+ return l;
+}
+
+
+LUALIB_API const char *luaL_tolstring (lua_State *L, int idx, size_t *len) {
+ if (luaL_callmeta(L, idx, "__tostring")) { /* metafield? */
+ if (!lua_isstring(L, -1))
+ luaL_error(L, "'__tostring' must return a string");
+ }
+ else {
+ switch (lua_type(L, idx)) {
+ case LUA_TNUMBER: {
+ if (lua_isinteger(L, idx))
+ lua_pushfstring(L, "%I", (LUAI_UACINT)lua_tointeger(L, idx));
+ else
+ lua_pushfstring(L, "%f", (LUAI_UACNUMBER)lua_tonumber(L, idx));
+ break;
+ }
+ case LUA_TSTRING:
+ lua_pushvalue(L, idx);
+ break;
+ case LUA_TBOOLEAN:
+ lua_pushstring(L, (lua_toboolean(L, idx) ? "true" : "false"));
+ break;
+ case LUA_TNIL:
+ lua_pushliteral(L, "nil");
+ break;
+ default: {
+ int tt = luaL_getmetafield(L, idx, "__name"); /* try name */
+ const char *kind = (tt == LUA_TSTRING) ? lua_tostring(L, -1) :
+ luaL_typename(L, idx);
+ lua_pushfstring(L, "%s: %p", kind, lua_topointer(L, idx));
+ if (tt != LUA_TNIL)
+ lua_remove(L, -2); /* remove '__name' */
+ break;
+ }
+ }
+ }
+ return lua_tolstring(L, -1, len);
+}
+
+
+/*
+** {======================================================
+** Compatibility with 5.1 module functions
+** =======================================================
+*/
+#if defined(LUA_COMPAT_MODULE)
+
+static const char *luaL_findtable (lua_State *L, int idx,
+ const char *fname, int szhint) {
+ const char *e;
+ if (idx) lua_pushvalue(L, idx);
+ do {
+ e = strchr(fname, '.');
+ if (e == NULL) e = fname + strlen(fname);
+ lua_pushlstring(L, fname, e - fname);
+ if (lua_rawget(L, -2) == LUA_TNIL) { /* no such field? */
+ lua_pop(L, 1); /* remove this nil */
+ lua_createtable(L, 0, (*e == '.' ? 1 : szhint)); /* new table for field */
+ lua_pushlstring(L, fname, e - fname);
+ lua_pushvalue(L, -2);
+ lua_settable(L, -4); /* set new table into field */
+ }
+ else if (!lua_istable(L, -1)) { /* field has a non-table value? */
+ lua_pop(L, 2); /* remove table and value */
+ return fname; /* return problematic part of the name */
+ }
+ lua_remove(L, -2); /* remove previous table */
+ fname = e + 1;
+ } while (*e == '.');
+ return NULL;
+}
+
+
+/*
+** Count number of elements in a luaL_Reg list.
+*/
+static int libsize (const luaL_Reg *l) {
+ int size = 0;
+ for (; l && l->name; l++) size++;
+ return size;
+}
+
+
+/*
+** Find or create a module table with a given name. The function
+** first looks at the LOADED table and, if that fails, tries a
+** global variable with that name. In any case, it leaves the module
+** table on the stack.
+*/
+LUALIB_API void luaL_pushmodule (lua_State *L, const char *modname,
+ int sizehint) {
+ luaL_findtable(L, LUA_REGISTRYINDEX, LUA_LOADED_TABLE, 1);
+ if (lua_getfield(L, -1, modname) != LUA_TTABLE) { /* no LOADED[modname]? */
+ lua_pop(L, 1); /* remove previous result */
+ /* try global variable (and create one if it does not exist) */
+ lua_pushglobaltable(L);
+ if (luaL_findtable(L, 0, modname, sizehint) != NULL)
+ luaL_error(L, "name conflict for module '%s'", modname);
+ lua_pushvalue(L, -1);
+ lua_setfield(L, -3, modname); /* LOADED[modname] = new table */
+ }
+ lua_remove(L, -2); /* remove LOADED table */
+}
+
+
+LUALIB_API void luaL_openlib (lua_State *L, const char *libname,
+ const luaL_Reg *l, int nup) {
+ luaL_checkversion(L);
+ if (libname) {
+ luaL_pushmodule(L, libname, libsize(l)); /* get/create library table */
+ lua_insert(L, -(nup + 1)); /* move library table to below upvalues */
+ }
+ if (l)
+ luaL_setfuncs(L, l, nup);
+ else
+ lua_pop(L, nup); /* remove upvalues */
+}
+
+#endif
+/* }====================================================== */
+
+/*
+** set functions from list 'l' into table at top - 'nup'; each
+** function gets the 'nup' elements at the top as upvalues.
+** Returns with only the table left on the stack.
+*/
+LUALIB_API void luaL_setfuncs (lua_State *L, const luaL_Reg *l, int nup) {
+ luaL_checkstack(L, nup, "too many upvalues");
+ for (; l->name != NULL; l++) { /* fill the table with given functions */
+ int i;
+ for (i = 0; i < nup; i++) /* copy upvalues to the top */
+ lua_pushvalue(L, -nup);
+ lua_pushcclosure(L, l->func, nup); /* closure with those upvalues */
+ lua_setfield(L, -(nup + 2), l->name);
+ }
+ lua_pop(L, nup); /* remove upvalues */
+}
+
+
+/*
+** ensure that stack[idx][fname] has a table and push that table
+** into the stack
+*/
+LUALIB_API int luaL_getsubtable (lua_State *L, int idx, const char *fname) {
+ if (lua_getfield(L, idx, fname) == LUA_TTABLE)
+ return 1; /* table already there */
+ else {
+ lua_pop(L, 1); /* remove previous result */
+ idx = lua_absindex(L, idx);
+ lua_newtable(L);
+ lua_pushvalue(L, -1); /* copy to be left at top */
+ lua_setfield(L, idx, fname); /* assign new table to field */
+ return 0; /* false, because did not find table there */
+ }
+}
+
+
+/*
+** Stripped-down 'require': after checking the "loaded" table, calls 'openf'
+** to open a module, registers the result in the 'package.loaded' table and,
+** if 'glb' is true, also registers the result in the global table.
+** Leaves the resulting module on top of the stack.
+*/
+LUALIB_API void luaL_requiref (lua_State *L, const char *modname,
+ lua_CFunction openf, int glb) {
+ luaL_getsubtable(L, LUA_REGISTRYINDEX, LUA_LOADED_TABLE);
+ lua_getfield(L, -1, modname); /* LOADED[modname] */
+ if (!lua_toboolean(L, -1)) { /* package not already loaded? */
+ lua_pop(L, 1); /* remove field */
+ lua_pushcfunction(L, openf);
+ lua_pushstring(L, modname); /* argument to open function */
+ lua_call(L, 1, 1); /* call 'openf' to open module */
+ lua_pushvalue(L, -1); /* make copy of module (call result) */
+ lua_setfield(L, -3, modname); /* LOADED[modname] = module */
+ }
+ lua_remove(L, -2); /* remove LOADED table */
+ if (glb) {
+ lua_pushvalue(L, -1); /* copy of module */
+ lua_setglobal(L, modname); /* _G[modname] = module */
+ }
+}
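+
+/*
+** Usage sketch (illustration only, not part of the upstream Lua sources):
+** a host program can expose one of its own libraries through
+** 'luaL_requiref'.  'luaopen_mylib' is a hypothetical open function with
+** the usual lua_CFunction signature.
+**
+**   luaL_requiref(L, "mylib", luaopen_mylib, 1);  // glb=1 also sets _G.mylib
+**   lua_pop(L, 1);  // drop the copy of the module left on the stack
+*/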
+
+
+LUALIB_API const char *luaL_gsub (lua_State *L, const char *s, const char *p,
+ const char *r) {
+ const char *wild;
+ size_t l = strlen(p);
+ luaL_Buffer b;
+ luaL_buffinit(L, &b);
+ while ((wild = strstr(s, p)) != NULL) {
+ luaL_addlstring(&b, s, wild - s); /* push prefix */
+ luaL_addstring(&b, r); /* push replacement in place of pattern */
+ s = wild + l; /* continue after 'p' */
+ }
+ luaL_addstring(&b, s); /* push last suffix */
+ luaL_pushresult(&b);
+ return lua_tostring(L, -1);
+}
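+
+/*
+** Worked example (illustration only, not part of the upstream sources):
+** luaL_gsub(L, "a.b.c", ".", "/") pushes the string "a/b/c" and returns a
+** pointer to it.  Note that 'p' is matched literally with 'strstr', not as
+** a Lua pattern.
+*/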
+
+
+static void *l_alloc (void *ud, void *ptr, size_t osize, size_t nsize) {
+ (void)ud; (void)osize; /* not used */
+ if (nsize == 0) {
+ free(ptr);
+ return NULL;
+ }
+ else
+ return realloc(ptr, nsize);
+}
+
+
+static int panic (lua_State *L) {
+ lua_writestringerror("PANIC: unprotected error in call to Lua API (%s)\n",
+ lua_tostring(L, -1));
+ return 0; /* return to Lua to abort */
+}
+
+
+LUALIB_API lua_State *luaL_newstate (void) {
+ lua_State *L = lua_newstate(l_alloc, NULL);
+ if (L) lua_atpanic(L, &panic);
+ return L;
+}
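+
+/*
+** Usage sketch (illustration only, not part of the upstream Lua sources):
+** the minimal host lifecycle.  'luaL_openlibs' is declared in lualib.h,
+** 'lua_close' in lua.h; "script.lua" is a made-up file name and error
+** handling is kept minimal.
+**
+**   lua_State *L = luaL_newstate();
+**   if (L == NULL) return 1;  // allocation failed
+**   luaL_openlibs(L);
+**   if (luaL_dofile(L, "script.lua")) {  // nonzero means load or run error
+**     fprintf(stderr, "%s\n", lua_tostring(L, -1));
+**     lua_pop(L, 1);
+**   }
+**   lua_close(L);
+*/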
+
+
+LUALIB_API void luaL_checkversion_ (lua_State *L, lua_Number ver, size_t sz) {
+ const lua_Number *v = lua_version(L);
+ if (sz != LUAL_NUMSIZES) /* check numeric types */
+ luaL_error(L, "core and library have incompatible numeric types");
+ if (v != lua_version(NULL))
+ luaL_error(L, "multiple Lua VMs detected");
+ else if (*v != ver)
+ luaL_error(L, "version mismatch: app. needs %f, Lua core provides %f",
+ (LUAI_UACNUMBER)ver, (LUAI_UACNUMBER)*v);
+}
+
diff --git a/lua/src/lauxlib.h b/lua/src/lauxlib.h
new file mode 100644
index 000000000..9a2e66aa0
--- /dev/null
+++ b/lua/src/lauxlib.h
@@ -0,0 +1,264 @@
+/*
+** $Id: lauxlib.h,v 1.131 2016/12/06 14:54:31 roberto Exp $
+** Auxiliary functions for building Lua libraries
+** See Copyright Notice in lua.h
+*/
+
+
+#ifndef lauxlib_h
+#define lauxlib_h
+
+
+#include <stddef.h>
+#include <stdio.h>
+
+#include "lua.h"
+
+
+
+/* extra error code for 'luaL_loadfilex' */
+#define LUA_ERRFILE (LUA_ERRERR+1)
+
+
+/* key, in the registry, for table of loaded modules */
+#define LUA_LOADED_TABLE "_LOADED"
+
+
+/* key, in the registry, for table of preloaded loaders */
+#define LUA_PRELOAD_TABLE "_PRELOAD"
+
+
+typedef struct luaL_Reg {
+ const char *name;
+ lua_CFunction func;
+} luaL_Reg;
+
+
+#define LUAL_NUMSIZES (sizeof(lua_Integer)*16 + sizeof(lua_Number))
+
+LUALIB_API void (luaL_checkversion_) (lua_State *L, lua_Number ver, size_t sz);
+#define luaL_checkversion(L) \
+ luaL_checkversion_(L, LUA_VERSION_NUM, LUAL_NUMSIZES)
+
+LUALIB_API int (luaL_getmetafield) (lua_State *L, int obj, const char *e);
+LUALIB_API int (luaL_callmeta) (lua_State *L, int obj, const char *e);
+LUALIB_API const char *(luaL_tolstring) (lua_State *L, int idx, size_t *len);
+LUALIB_API int (luaL_argerror) (lua_State *L, int arg, const char *extramsg);
+LUALIB_API const char *(luaL_checklstring) (lua_State *L, int arg,
+ size_t *l);
+LUALIB_API const char *(luaL_optlstring) (lua_State *L, int arg,
+ const char *def, size_t *l);
+LUALIB_API lua_Number (luaL_checknumber) (lua_State *L, int arg);
+LUALIB_API lua_Number (luaL_optnumber) (lua_State *L, int arg, lua_Number def);
+
+LUALIB_API lua_Integer (luaL_checkinteger) (lua_State *L, int arg);
+LUALIB_API lua_Integer (luaL_optinteger) (lua_State *L, int arg,
+ lua_Integer def);
+
+LUALIB_API void (luaL_checkstack) (lua_State *L, int sz, const char *msg);
+LUALIB_API void (luaL_checktype) (lua_State *L, int arg, int t);
+LUALIB_API void (luaL_checkany) (lua_State *L, int arg);
+
+LUALIB_API int (luaL_newmetatable) (lua_State *L, const char *tname);
+LUALIB_API void (luaL_setmetatable) (lua_State *L, const char *tname);
+LUALIB_API void *(luaL_testudata) (lua_State *L, int ud, const char *tname);
+LUALIB_API void *(luaL_checkudata) (lua_State *L, int ud, const char *tname);
+
+LUALIB_API void (luaL_where) (lua_State *L, int lvl);
+LUALIB_API int (luaL_error) (lua_State *L, const char *fmt, ...);
+
+LUALIB_API int (luaL_checkoption) (lua_State *L, int arg, const char *def,
+ const char *const lst[]);
+
+LUALIB_API int (luaL_fileresult) (lua_State *L, int stat, const char *fname);
+LUALIB_API int (luaL_execresult) (lua_State *L, int stat);
+
+/* predefined references */
+#define LUA_NOREF (-2)
+#define LUA_REFNIL (-1)
+
+LUALIB_API int (luaL_ref) (lua_State *L, int t);
+LUALIB_API void (luaL_unref) (lua_State *L, int t, int ref);
+
+LUALIB_API int (luaL_loadfilex) (lua_State *L, const char *filename,
+ const char *mode);
+
+#define luaL_loadfile(L,f) luaL_loadfilex(L,f,NULL)
+
+LUALIB_API int (luaL_loadbufferx) (lua_State *L, const char *buff, size_t sz,
+ const char *name, const char *mode);
+LUALIB_API int (luaL_loadstring) (lua_State *L, const char *s);
+
+LUALIB_API lua_State *(luaL_newstate) (void);
+
+LUALIB_API lua_Integer (luaL_len) (lua_State *L, int idx);
+
+LUALIB_API const char *(luaL_gsub) (lua_State *L, const char *s, const char *p,
+ const char *r);
+
+LUALIB_API void (luaL_setfuncs) (lua_State *L, const luaL_Reg *l, int nup);
+
+LUALIB_API int (luaL_getsubtable) (lua_State *L, int idx, const char *fname);
+
+LUALIB_API void (luaL_traceback) (lua_State *L, lua_State *L1,
+ const char *msg, int level);
+
+LUALIB_API void (luaL_requiref) (lua_State *L, const char *modname,
+ lua_CFunction openf, int glb);
+
+/*
+** ===============================================================
+** some useful macros
+** ===============================================================
+*/
+
+
+#define luaL_newlibtable(L,l) \
+ lua_createtable(L, 0, sizeof(l)/sizeof((l)[0]) - 1)
+
+#define luaL_newlib(L,l) \
+ (luaL_checkversion(L), luaL_newlibtable(L,l), luaL_setfuncs(L,l,0))
+
+#define luaL_argcheck(L, cond,arg,extramsg) \
+ ((void)((cond) || luaL_argerror(L, (arg), (extramsg))))
+#define luaL_checkstring(L,n) (luaL_checklstring(L, (n), NULL))
+#define luaL_optstring(L,n,d) (luaL_optlstring(L, (n), (d), NULL))
+
+#define luaL_typename(L,i) lua_typename(L, lua_type(L,(i)))
+
+#define luaL_dofile(L, fn) \
+ (luaL_loadfile(L, fn) || lua_pcall(L, 0, LUA_MULTRET, 0))
+
+#define luaL_dostring(L, s) \
+ (luaL_loadstring(L, s) || lua_pcall(L, 0, LUA_MULTRET, 0))
+
+#define luaL_getmetatable(L,n) (lua_getfield(L, LUA_REGISTRYINDEX, (n)))
+
+#define luaL_opt(L,f,n,d) (lua_isnoneornil(L,(n)) ? (d) : f(L,(n)))
+
+#define luaL_loadbuffer(L,s,sz,n) luaL_loadbufferx(L,s,sz,n,NULL)
+
+
+/*
+** {======================================================
+** Generic Buffer manipulation
+** =======================================================
+*/
+
+typedef struct luaL_Buffer {
+ char *b; /* buffer address */
+ size_t size; /* buffer size */
+ size_t n; /* number of characters in buffer */
+ lua_State *L;
+ char initb[LUAL_BUFFERSIZE]; /* initial buffer */
+} luaL_Buffer;
+
+
+#define luaL_addchar(B,c) \
+ ((void)((B)->n < (B)->size || luaL_prepbuffsize((B), 1)), \
+ ((B)->b[(B)->n++] = (c)))
+
+#define luaL_addsize(B,s) ((B)->n += (s))
+
+LUALIB_API void (luaL_buffinit) (lua_State *L, luaL_Buffer *B);
+LUALIB_API char *(luaL_prepbuffsize) (luaL_Buffer *B, size_t sz);
+LUALIB_API void (luaL_addlstring) (luaL_Buffer *B, const char *s, size_t l);
+LUALIB_API void (luaL_addstring) (luaL_Buffer *B, const char *s);
+LUALIB_API void (luaL_addvalue) (luaL_Buffer *B);
+LUALIB_API void (luaL_pushresult) (luaL_Buffer *B);
+LUALIB_API void (luaL_pushresultsize) (luaL_Buffer *B, size_t sz);
+LUALIB_API char *(luaL_buffinitsize) (lua_State *L, luaL_Buffer *B, size_t sz);
+
+#define luaL_prepbuffer(B) luaL_prepbuffsize(B, LUAL_BUFFERSIZE)
+
+/* }====================================================== */
+
+
+
+/*
+** {======================================================
+** File handles for IO library
+** =======================================================
+*/
+
+/*
+** A file handle is a userdata with metatable 'LUA_FILEHANDLE' and
+** initial structure 'luaL_Stream' (it may contain other fields
+** after that initial structure).
+*/
+
+#define LUA_FILEHANDLE "FILE*"
+
+
+typedef struct luaL_Stream {
+ FILE *f; /* stream (NULL for incompletely created streams) */
+ lua_CFunction closef; /* to close stream (NULL for closed streams) */
+} luaL_Stream;
+
+/* }====================================================== */
+
+
+
+/* compatibility with old module system */
+#if defined(LUA_COMPAT_MODULE)
+
+LUALIB_API void (luaL_pushmodule) (lua_State *L, const char *modname,
+ int sizehint);
+LUALIB_API void (luaL_openlib) (lua_State *L, const char *libname,
+ const luaL_Reg *l, int nup);
+
+#define luaL_register(L,n,l) (luaL_openlib(L,(n),(l),0))
+
+#endif
+
+
+/*
+** {==================================================================
+** "Abstraction Layer" for basic report of messages and errors
+** ===================================================================
+*/
+
+/* print a string */
+#if !defined(lua_writestring)
+#define lua_writestring(s,l) fwrite((s), sizeof(char), (l), stdout)
+#endif
+
+/* print a newline and flush the output */
+#if !defined(lua_writeline)
+#define lua_writeline() (lua_writestring("\n", 1), fflush(stdout))
+#endif
+
+/* print an error message */
+#if !defined(lua_writestringerror)
+#define lua_writestringerror(s,p) \
+ (fprintf(stderr, (s), (p)), fflush(stderr))
+#endif
+
+/* }================================================================== */
+
+
+/*
+** {============================================================
+** Compatibility with deprecated conversions
+** =============================================================
+*/
+#if defined(LUA_COMPAT_APIINTCASTS)
+
+#define luaL_checkunsigned(L,a) ((lua_Unsigned)luaL_checkinteger(L,a))
+#define luaL_optunsigned(L,a,d) \
+ ((lua_Unsigned)luaL_optinteger(L,a,(lua_Integer)(d)))
+
+#define luaL_checkint(L,n) ((int)luaL_checkinteger(L, (n)))
+#define luaL_optint(L,n,d) ((int)luaL_optinteger(L, (n), (d)))
+
+#define luaL_checklong(L,n) ((long)luaL_checkinteger(L, (n)))
+#define luaL_optlong(L,n,d) ((long)luaL_optinteger(L, (n), (d)))
+
+#endif
+/* }============================================================ */
+
+
+
+#endif
+
+
diff --git a/lua/src/lbaselib.c b/lua/src/lbaselib.c
new file mode 100644
index 000000000..08523e6e7
--- /dev/null
+++ b/lua/src/lbaselib.c
@@ -0,0 +1,498 @@
+/*
+** $Id: lbaselib.c,v 1.314 2016/09/05 19:06:34 roberto Exp $
+** Basic library
+** See Copyright Notice in lua.h
+*/
+
+#define lbaselib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <ctype.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+static int luaB_print (lua_State *L) {
+ int n = lua_gettop(L); /* number of arguments */
+ int i;
+ lua_getglobal(L, "tostring");
+ for (i=1; i<=n; i++) {
+ const char *s;
+ size_t l;
+ lua_pushvalue(L, -1); /* function to be called */
+ lua_pushvalue(L, i); /* value to print */
+ lua_call(L, 1, 1);
+ s = lua_tolstring(L, -1, &l); /* get result */
+ if (s == NULL)
+ return luaL_error(L, "'tostring' must return a string to 'print'");
+ if (i>1) lua_writestring("\t", 1);
+ lua_writestring(s, l);
+ lua_pop(L, 1); /* pop result */
+ }
+ lua_writeline();
+ return 0;
+}
+
+
+#define SPACECHARS " \f\n\r\t\v"
+
+static const char *b_str2int (const char *s, int base, lua_Integer *pn) {
+ lua_Unsigned n = 0;
+ int neg = 0;
+ s += strspn(s, SPACECHARS); /* skip initial spaces */
+ if (*s == '-') { s++; neg = 1; } /* handle sign */
+ else if (*s == '+') s++;
+ if (!isalnum((unsigned char)*s)) /* no digit? */
+ return NULL;
+ do {
+ int digit = (isdigit((unsigned char)*s)) ? *s - '0'
+ : (toupper((unsigned char)*s) - 'A') + 10;
+ if (digit >= base) return NULL; /* invalid numeral */
+ n = n * base + digit;
+ s++;
+ } while (isalnum((unsigned char)*s));
+ s += strspn(s, SPACECHARS); /* skip trailing spaces */
+ *pn = (lua_Integer)((neg) ? (0u - n) : n);
+ return s;
+}
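+
+/*
+** Worked example (illustration only, not part of the upstream sources):
+** b_str2int("  ff  ", 16, &n) stores 255 in 'n' and returns a pointer past
+** the trailing spaces, whereas b_str2int("1g", 16, &n) returns NULL because
+** 'g' is not a valid digit in base 16.
+*/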
+
+
+static int luaB_tonumber (lua_State *L) {
+ if (lua_isnoneornil(L, 2)) { /* standard conversion? */
+ luaL_checkany(L, 1);
+ if (lua_type(L, 1) == LUA_TNUMBER) { /* already a number? */
+ lua_settop(L, 1); /* yes; return it */
+ return 1;
+ }
+ else {
+ size_t l;
+ const char *s = lua_tolstring(L, 1, &l);
+ if (s != NULL && lua_stringtonumber(L, s) == l + 1)
+ return 1; /* successful conversion to number */
+ /* else not a number */
+ }
+ }
+ else {
+ size_t l;
+ const char *s;
+ lua_Integer n = 0; /* to avoid warnings */
+ lua_Integer base = luaL_checkinteger(L, 2);
+ luaL_checktype(L, 1, LUA_TSTRING); /* no numbers as strings */
+ s = lua_tolstring(L, 1, &l);
+ luaL_argcheck(L, 2 <= base && base <= 36, 2, "base out of range");
+ if (b_str2int(s, (int)base, &n) == s + l) {
+ lua_pushinteger(L, n);
+ return 1;
+ } /* else not a number */
+ } /* else not a number */
+ lua_pushnil(L); /* not a number */
+ return 1;
+}
+
+
+static int luaB_error (lua_State *L) {
+ int level = (int)luaL_optinteger(L, 2, 1);
+ lua_settop(L, 1);
+ if (lua_type(L, 1) == LUA_TSTRING && level > 0) {
+ luaL_where(L, level); /* add extra information */
+ lua_pushvalue(L, 1);
+ lua_concat(L, 2);
+ }
+ return lua_error(L);
+}
+
+
+static int luaB_getmetatable (lua_State *L) {
+ luaL_checkany(L, 1);
+ if (!lua_getmetatable(L, 1)) {
+ lua_pushnil(L);
+ return 1; /* no metatable */
+ }
+ luaL_getmetafield(L, 1, "__metatable");
+ return 1; /* returns either __metatable field (if present) or metatable */
+}
+
+
+static int luaB_setmetatable (lua_State *L) {
+ int t = lua_type(L, 2);
+ luaL_checktype(L, 1, LUA_TTABLE);
+ luaL_argcheck(L, t == LUA_TNIL || t == LUA_TTABLE, 2,
+ "nil or table expected");
+ if (luaL_getmetafield(L, 1, "__metatable") != LUA_TNIL)
+ return luaL_error(L, "cannot change a protected metatable");
+ lua_settop(L, 2);
+ lua_setmetatable(L, 1);
+ return 1;
+}
+
+
+static int luaB_rawequal (lua_State *L) {
+ luaL_checkany(L, 1);
+ luaL_checkany(L, 2);
+ lua_pushboolean(L, lua_rawequal(L, 1, 2));
+ return 1;
+}
+
+
+static int luaB_rawlen (lua_State *L) {
+ int t = lua_type(L, 1);
+ luaL_argcheck(L, t == LUA_TTABLE || t == LUA_TSTRING, 1,
+ "table or string expected");
+ lua_pushinteger(L, lua_rawlen(L, 1));
+ return 1;
+}
+
+
+static int luaB_rawget (lua_State *L) {
+ luaL_checktype(L, 1, LUA_TTABLE);
+ luaL_checkany(L, 2);
+ lua_settop(L, 2);
+ lua_rawget(L, 1);
+ return 1;
+}
+
+static int luaB_rawset (lua_State *L) {
+ luaL_checktype(L, 1, LUA_TTABLE);
+ luaL_checkany(L, 2);
+ luaL_checkany(L, 3);
+ lua_settop(L, 3);
+ lua_rawset(L, 1);
+ return 1;
+}
+
+
+static int luaB_collectgarbage (lua_State *L) {
+ static const char *const opts[] = {"stop", "restart", "collect",
+ "count", "step", "setpause", "setstepmul",
+ "isrunning", NULL};
+ static const int optsnum[] = {LUA_GCSTOP, LUA_GCRESTART, LUA_GCCOLLECT,
+ LUA_GCCOUNT, LUA_GCSTEP, LUA_GCSETPAUSE, LUA_GCSETSTEPMUL,
+ LUA_GCISRUNNING};
+ int o = optsnum[luaL_checkoption(L, 1, "collect", opts)];
+ int ex = (int)luaL_optinteger(L, 2, 0);
+ int res = lua_gc(L, o, ex);
+ switch (o) {
+ case LUA_GCCOUNT: {
+ int b = lua_gc(L, LUA_GCCOUNTB, 0);
+ lua_pushnumber(L, (lua_Number)res + ((lua_Number)b/1024));
+ return 1;
+ }
+ case LUA_GCSTEP: case LUA_GCISRUNNING: {
+ lua_pushboolean(L, res);
+ return 1;
+ }
+ default: {
+ lua_pushinteger(L, res);
+ return 1;
+ }
+ }
+}
+
+
+static int luaB_type (lua_State *L) {
+ int t = lua_type(L, 1);
+ luaL_argcheck(L, t != LUA_TNONE, 1, "value expected");
+ lua_pushstring(L, lua_typename(L, t));
+ return 1;
+}
+
+
+static int pairsmeta (lua_State *L, const char *method, int iszero,
+ lua_CFunction iter) {
+ luaL_checkany(L, 1);
+ if (luaL_getmetafield(L, 1, method) == LUA_TNIL) { /* no metamethod? */
+ lua_pushcfunction(L, iter); /* will return generator, */
+ lua_pushvalue(L, 1); /* state, */
+ if (iszero) lua_pushinteger(L, 0); /* and initial value */
+ else lua_pushnil(L);
+ }
+ else {
+ lua_pushvalue(L, 1); /* argument 'self' to metamethod */
+ lua_call(L, 1, 3); /* get 3 values from metamethod */
+ }
+ return 3;
+}
+
+
+static int luaB_next (lua_State *L) {
+ luaL_checktype(L, 1, LUA_TTABLE);
+ lua_settop(L, 2); /* create a 2nd argument if there isn't one */
+ if (lua_next(L, 1))
+ return 2;
+ else {
+ lua_pushnil(L);
+ return 1;
+ }
+}
+
+
+static int luaB_pairs (lua_State *L) {
+ return pairsmeta(L, "__pairs", 0, luaB_next);
+}
+
+
+/*
+** Traversal function for 'ipairs'
+*/
+static int ipairsaux (lua_State *L) {
+ lua_Integer i = luaL_checkinteger(L, 2) + 1;
+ lua_pushinteger(L, i);
+ return (lua_geti(L, 1, i) == LUA_TNIL) ? 1 : 2;
+}
+
+
+/*
+** 'ipairs' function. Returns 'ipairsaux', given "table", 0.
+** (The given "table" may not be a table.)
+*/
+static int luaB_ipairs (lua_State *L) {
+#if defined(LUA_COMPAT_IPAIRS)
+ return pairsmeta(L, "__ipairs", 1, ipairsaux);
+#else
+ luaL_checkany(L, 1);
+ lua_pushcfunction(L, ipairsaux); /* iteration function */
+ lua_pushvalue(L, 1); /* state */
+ lua_pushinteger(L, 0); /* initial value */
+ return 3;
+#endif
+}
+
+
+static int load_aux (lua_State *L, int status, int envidx) {
+ if (status == LUA_OK) {
+ if (envidx != 0) { /* 'env' parameter? */
+ lua_pushvalue(L, envidx); /* environment for loaded function */
+ if (!lua_setupvalue(L, -2, 1)) /* set it as 1st upvalue */
+ lua_pop(L, 1); /* remove 'env' if not used by previous call */
+ }
+ return 1;
+ }
+ else { /* error (message is on top of the stack) */
+ lua_pushnil(L);
+ lua_insert(L, -2); /* put before error message */
+ return 2; /* return nil plus error message */
+ }
+}
+
+
+static int luaB_loadfile (lua_State *L) {
+ const char *fname = luaL_optstring(L, 1, NULL);
+ const char *mode = luaL_optstring(L, 2, NULL);
+ int env = (!lua_isnone(L, 3) ? 3 : 0); /* 'env' index or 0 if no 'env' */
+ int status = luaL_loadfilex(L, fname, mode);
+ return load_aux(L, status, env);
+}
+
+
+/*
+** {======================================================
+** Generic Read function
+** =======================================================
+*/
+
+
+/*
+** reserved slot, above all arguments, to hold a copy of the returned
+** string to avoid it being collected while parsed. 'load' has four
+** optional arguments (chunk, source name, mode, and environment).
+*/
+#define RESERVEDSLOT 5
+
+
+/*
+** Reader for generic 'load' function: 'lua_load' uses the
+** stack for internal stuff, so the reader cannot change the
+** stack top. Instead, it keeps its resulting string in a
+** reserved slot inside the stack.
+*/
+static const char *generic_reader (lua_State *L, void *ud, size_t *size) {
+ (void)(ud); /* not used */
+ luaL_checkstack(L, 2, "too many nested functions");
+ lua_pushvalue(L, 1); /* get function */
+ lua_call(L, 0, 1); /* call it */
+ if (lua_isnil(L, -1)) {
+ lua_pop(L, 1); /* pop result */
+ *size = 0;
+ return NULL;
+ }
+ else if (!lua_isstring(L, -1))
+ luaL_error(L, "reader function must return a string");
+ lua_replace(L, RESERVEDSLOT); /* save string in reserved slot */
+ return lua_tolstring(L, RESERVEDSLOT, size);
+}
+
+
+static int luaB_load (lua_State *L) {
+ int status;
+ size_t l;
+ const char *s = lua_tolstring(L, 1, &l);
+ const char *mode = luaL_optstring(L, 3, "bt");
+ int env = (!lua_isnone(L, 4) ? 4 : 0); /* 'env' index or 0 if no 'env' */
+ if (s != NULL) { /* loading a string? */
+ const char *chunkname = luaL_optstring(L, 2, s);
+ status = luaL_loadbufferx(L, s, l, chunkname, mode);
+ }
+ else { /* loading from a reader function */
+ const char *chunkname = luaL_optstring(L, 2, "=(load)");
+ luaL_checktype(L, 1, LUA_TFUNCTION);
+ lua_settop(L, RESERVEDSLOT); /* create reserved slot */
+ status = lua_load(L, generic_reader, NULL, chunkname, mode);
+ }
+ return load_aux(L, status, env);
+}
+
+/* }====================================================== */
+
+
+static int dofilecont (lua_State *L, int d1, lua_KContext d2) {
+ (void)d1; (void)d2; /* only to match 'lua_KFunction' prototype */
+ return lua_gettop(L) - 1;
+}
+
+
+static int luaB_dofile (lua_State *L) {
+ const char *fname = luaL_optstring(L, 1, NULL);
+ lua_settop(L, 1);
+ if (luaL_loadfile(L, fname) != LUA_OK)
+ return lua_error(L);
+ lua_callk(L, 0, LUA_MULTRET, 0, dofilecont);
+ return dofilecont(L, 0, 0);
+}
+
+
+static int luaB_assert (lua_State *L) {
+ if (lua_toboolean(L, 1)) /* condition is true? */
+ return lua_gettop(L); /* return all arguments */
+ else { /* error */
+ luaL_checkany(L, 1); /* there must be a condition */
+ lua_remove(L, 1); /* remove it */
+ lua_pushliteral(L, "assertion failed!"); /* default message */
+ lua_settop(L, 1); /* leave only message (default if no other one) */
+ return luaB_error(L); /* call 'error' */
+ }
+}
+
+
+static int luaB_select (lua_State *L) {
+ int n = lua_gettop(L);
+ if (lua_type(L, 1) == LUA_TSTRING && *lua_tostring(L, 1) == '#') {
+ lua_pushinteger(L, n-1);
+ return 1;
+ }
+ else {
+ lua_Integer i = luaL_checkinteger(L, 1);
+ if (i < 0) i = n + i;
+ else if (i > n) i = n;
+ luaL_argcheck(L, 1 <= i, 1, "index out of range");
+ return n - (int)i;
+ }
+}
+
+
+/*
+** Continuation function for 'pcall' and 'xpcall'. Both functions
+** already pushed a 'true' before doing the call, so in case of success
+** 'finishpcall' only has to return everything in the stack minus
+** 'extra' values (where 'extra' is exactly the number of items to be
+** ignored).
+*/
+static int finishpcall (lua_State *L, int status, lua_KContext extra) {
+ if (status != LUA_OK && status != LUA_YIELD) { /* error? */
+ lua_pushboolean(L, 0); /* first result (false) */
+ lua_pushvalue(L, -2); /* error message */
+ return 2; /* return false, msg */
+ }
+ else
+ return lua_gettop(L) - (int)extra; /* return all results */
+}
+
+
+static int luaB_pcall (lua_State *L) {
+ int status;
+ luaL_checkany(L, 1);
+ lua_pushboolean(L, 1); /* first result if no errors */
+ lua_insert(L, 1); /* put it in place */
+ status = lua_pcallk(L, lua_gettop(L) - 2, LUA_MULTRET, 0, 0, finishpcall);
+ return finishpcall(L, status, 0);
+}
+
+
+/*
+** Do a protected call with error handling. After 'lua_rotate', the
+** stack will have <f, err, true, f, [args...]>; so, the function passes
+** 2 to 'finishpcall' to skip the 2 first values when returning results.
+*/
+static int luaB_xpcall (lua_State *L) {
+ int status;
+ int n = lua_gettop(L);
+ luaL_checktype(L, 2, LUA_TFUNCTION); /* check error function */
+ lua_pushboolean(L, 1); /* first result */
+ lua_pushvalue(L, 1); /* function */
+ lua_rotate(L, 3, 2); /* move them below function's arguments */
+ status = lua_pcallk(L, n - 2, LUA_MULTRET, 2, 2, finishpcall);
+ return finishpcall(L, status, 2);
+}
+
+
+static int luaB_tostring (lua_State *L) {
+ luaL_checkany(L, 1);
+ luaL_tolstring(L, 1, NULL);
+ return 1;
+}
+
+
+static const luaL_Reg base_funcs[] = {
+ {"assert", luaB_assert},
+ {"collectgarbage", luaB_collectgarbage},
+ {"dofile", luaB_dofile},
+ {"error", luaB_error},
+ {"getmetatable", luaB_getmetatable},
+ {"ipairs", luaB_ipairs},
+ {"loadfile", luaB_loadfile},
+ {"load", luaB_load},
+#if defined(LUA_COMPAT_LOADSTRING)
+ {"loadstring", luaB_load},
+#endif
+ {"next", luaB_next},
+ {"pairs", luaB_pairs},
+ {"pcall", luaB_pcall},
+ {"print", luaB_print},
+ {"rawequal", luaB_rawequal},
+ {"rawlen", luaB_rawlen},
+ {"rawget", luaB_rawget},
+ {"rawset", luaB_rawset},
+ {"select", luaB_select},
+ {"setmetatable", luaB_setmetatable},
+ {"tonumber", luaB_tonumber},
+ {"tostring", luaB_tostring},
+ {"type", luaB_type},
+ {"xpcall", luaB_xpcall},
+ /* placeholders */
+ {"_G", NULL},
+ {"_VERSION", NULL},
+ {NULL, NULL}
+};
+
+
+LUAMOD_API int luaopen_base (lua_State *L) {
+ /* open lib into global table */
+ lua_pushglobaltable(L);
+ luaL_setfuncs(L, base_funcs, 0);
+ /* set global _G */
+ lua_pushvalue(L, -1);
+ lua_setfield(L, -2, "_G");
+ /* set global _VERSION */
+ lua_pushliteral(L, LUA_VERSION);
+ lua_setfield(L, -2, "_VERSION");
+ return 1;
+}
+
diff --git a/lua/src/lbitlib.c b/lua/src/lbitlib.c
new file mode 100644
index 000000000..1cb1d5b93
--- /dev/null
+++ b/lua/src/lbitlib.c
@@ -0,0 +1,233 @@
+/*
+** $Id: lbitlib.c,v 1.30 2015/11/11 19:08:09 roberto Exp $
+** Standard library for bitwise operations
+** See Copyright Notice in lua.h
+*/
+
+#define lbitlib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+#if defined(LUA_COMPAT_BITLIB) /* { */
+
+
+#define pushunsigned(L,n) lua_pushinteger(L, (lua_Integer)(n))
+#define checkunsigned(L,i) ((lua_Unsigned)luaL_checkinteger(L,i))
+
+
+/* number of bits to consider in a number */
+#if !defined(LUA_NBITS)
+#define LUA_NBITS 32
+#endif
+
+
+/*
+** a lua_Unsigned with its first LUA_NBITS bits equal to 1. (Shift must
+** be made in two parts to avoid problems when LUA_NBITS is equal to the
+** number of bits in a lua_Unsigned.)
+*/
+#define ALLONES (~(((~(lua_Unsigned)0) << (LUA_NBITS - 1)) << 1))
+
+
+/* macro to trim extra bits */
+#define trim(x) ((x) & ALLONES)
+
+
+/* builds a number with 'n' ones (1 <= n <= LUA_NBITS) */
+#define mask(n) (~((ALLONES << 1) << ((n) - 1)))
+
+
+
+static lua_Unsigned andaux (lua_State *L) {
+ int i, n = lua_gettop(L);
+ lua_Unsigned r = ~(lua_Unsigned)0;
+ for (i = 1; i <= n; i++)
+ r &= checkunsigned(L, i);
+ return trim(r);
+}
+
+
+static int b_and (lua_State *L) {
+ lua_Unsigned r = andaux(L);
+ pushunsigned(L, r);
+ return 1;
+}
+
+
+static int b_test (lua_State *L) {
+ lua_Unsigned r = andaux(L);
+ lua_pushboolean(L, r != 0);
+ return 1;
+}
+
+
+static int b_or (lua_State *L) {
+ int i, n = lua_gettop(L);
+ lua_Unsigned r = 0;
+ for (i = 1; i <= n; i++)
+ r |= checkunsigned(L, i);
+ pushunsigned(L, trim(r));
+ return 1;
+}
+
+
+static int b_xor (lua_State *L) {
+ int i, n = lua_gettop(L);
+ lua_Unsigned r = 0;
+ for (i = 1; i <= n; i++)
+ r ^= checkunsigned(L, i);
+ pushunsigned(L, trim(r));
+ return 1;
+}
+
+
+static int b_not (lua_State *L) {
+ lua_Unsigned r = ~checkunsigned(L, 1);
+ pushunsigned(L, trim(r));
+ return 1;
+}
+
+
+static int b_shift (lua_State *L, lua_Unsigned r, lua_Integer i) {
+ if (i < 0) { /* shift right? */
+ i = -i;
+ r = trim(r);
+ if (i >= LUA_NBITS) r = 0;
+ else r >>= i;
+ }
+ else { /* shift left */
+ if (i >= LUA_NBITS) r = 0;
+ else r <<= i;
+ r = trim(r);
+ }
+ pushunsigned(L, r);
+ return 1;
+}
+
+
+static int b_lshift (lua_State *L) {
+ return b_shift(L, checkunsigned(L, 1), luaL_checkinteger(L, 2));
+}
+
+
+static int b_rshift (lua_State *L) {
+ return b_shift(L, checkunsigned(L, 1), -luaL_checkinteger(L, 2));
+}
+
+
+static int b_arshift (lua_State *L) {
+ lua_Unsigned r = checkunsigned(L, 1);
+ lua_Integer i = luaL_checkinteger(L, 2);
+ if (i < 0 || !(r & ((lua_Unsigned)1 << (LUA_NBITS - 1))))
+ return b_shift(L, r, -i);
+ else { /* arithmetic shift for 'negative' number */
+ if (i >= LUA_NBITS) r = ALLONES;
+ else
+ r = trim((r >> i) | ~(trim(~(lua_Unsigned)0) >> i)); /* add sign bit */
+ pushunsigned(L, r);
+ return 1;
+ }
+}
+
+
+static int b_rot (lua_State *L, lua_Integer d) {
+ lua_Unsigned r = checkunsigned(L, 1);
+ int i = d & (LUA_NBITS - 1); /* i = d % NBITS */
+ r = trim(r);
+ if (i != 0) /* avoid undefined shift of LUA_NBITS when i == 0 */
+ r = (r << i) | (r >> (LUA_NBITS - i));
+ pushunsigned(L, trim(r));
+ return 1;
+}
+
+
+static int b_lrot (lua_State *L) {
+ return b_rot(L, luaL_checkinteger(L, 2));
+}
+
+
+static int b_rrot (lua_State *L) {
+ return b_rot(L, -luaL_checkinteger(L, 2));
+}
+
+
+/*
+** get field and width arguments for field-manipulation functions,
+** checking whether they are valid.
+** ('luaL_error' called without 'return' to avoid later warnings about
+** 'width' being used uninitialized.)
+*/
+static int fieldargs (lua_State *L, int farg, int *width) {
+ lua_Integer f = luaL_checkinteger(L, farg);
+ lua_Integer w = luaL_optinteger(L, farg + 1, 1);
+ luaL_argcheck(L, 0 <= f, farg, "field cannot be negative");
+ luaL_argcheck(L, 0 < w, farg + 1, "width must be positive");
+ if (f + w > LUA_NBITS)
+ luaL_error(L, "trying to access non-existent bits");
+ *width = (int)w;
+ return (int)f;
+}
+
+
+static int b_extract (lua_State *L) {
+ int w;
+ lua_Unsigned r = trim(checkunsigned(L, 1));
+ int f = fieldargs(L, 2, &w);
+ r = (r >> f) & mask(w);
+ pushunsigned(L, r);
+ return 1;
+}
+
+
+static int b_replace (lua_State *L) {
+ int w;
+ lua_Unsigned r = trim(checkunsigned(L, 1));
+ lua_Unsigned v = trim(checkunsigned(L, 2));
+ int f = fieldargs(L, 3, &w);
+ lua_Unsigned m = mask(w);
+ r = (r & ~(m << f)) | ((v & m) << f);
+ pushunsigned(L, r);
+ return 1;
+}
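+
+/*
+** Worked example (illustration only, not part of the upstream sources):
+** with LUA_NBITS == 32, bit32.extract(0xA5, 4, 4) selects bits 4..7 and
+** yields 0xA, while bit32.replace(0xA5, 0x3, 4, 4) overwrites that same
+** 4-bit field and yields 0x35.
+*/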
+
+
+static const luaL_Reg bitlib[] = {
+ {"arshift", b_arshift},
+ {"band", b_and},
+ {"bnot", b_not},
+ {"bor", b_or},
+ {"bxor", b_xor},
+ {"btest", b_test},
+ {"extract", b_extract},
+ {"lrotate", b_lrot},
+ {"lshift", b_lshift},
+ {"replace", b_replace},
+ {"rrotate", b_rrot},
+ {"rshift", b_rshift},
+ {NULL, NULL}
+};
+
+
+
+LUAMOD_API int luaopen_bit32 (lua_State *L) {
+ luaL_newlib(L, bitlib);
+ return 1;
+}
+
+
+#else /* }{ */
+
+
+LUAMOD_API int luaopen_bit32 (lua_State *L) {
+ return luaL_error(L, "library 'bit32' has been deprecated");
+}
+
+#endif /* } */
diff --git a/lua/src/lcode.c b/lua/src/lcode.c
new file mode 100644
index 000000000..0bb414262
--- /dev/null
+++ b/lua/src/lcode.c
@@ -0,0 +1,1203 @@
+/*
+** $Id: lcode.c,v 2.112 2016/12/22 13:08:50 roberto Exp $
+** Code generator for Lua
+** See Copyright Notice in lua.h
+*/
+
+#define lcode_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <math.h>
+#include <stdlib.h>
+
+#include "lua.h"
+
+#include "lcode.h"
+#include "ldebug.h"
+#include "ldo.h"
+#include "lgc.h"
+#include "llex.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lopcodes.h"
+#include "lparser.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "lvm.h"
+
+
+/* Maximum number of registers in a Lua function (must fit in 8 bits) */
+#define MAXREGS 255
+
+
+#define hasjumps(e) ((e)->t != (e)->f)
+
+
+/*
+** If expression is a numeric constant, fills 'v' with its value
+** and returns 1. Otherwise, returns 0.
+*/
+static int tonumeral(const expdesc *e, TValue *v) {
+ if (hasjumps(e))
+ return 0; /* not a numeral */
+ switch (e->k) {
+ case VKINT:
+ if (v) setivalue(v, e->u.ival);
+ return 1;
+ case VKFLT:
+ if (v) setfltvalue(v, e->u.nval);
+ return 1;
+ default: return 0;
+ }
+}
+
+
+/*
+** Create a OP_LOADNIL instruction, but try to optimize: if the previous
+** instruction is also OP_LOADNIL and ranges are compatible, adjust
+** range of previous instruction instead of emitting a new one. (For
+** instance, 'local a; local b' will generate a single opcode.)
+*/
+void luaK_nil (FuncState *fs, int from, int n) {
+ Instruction *previous;
+ int l = from + n - 1; /* last register to set nil */
+ if (fs->pc > fs->lasttarget) { /* no jumps to current position? */
+ previous = &fs->f->code[fs->pc-1];
+ if (GET_OPCODE(*previous) == OP_LOADNIL) { /* previous is LOADNIL? */
+ int pfrom = GETARG_A(*previous); /* get previous range */
+ int pl = pfrom + GETARG_B(*previous);
+ if ((pfrom <= from && from <= pl + 1) ||
+ (from <= pfrom && pfrom <= l + 1)) { /* can connect both? */
+ if (pfrom < from) from = pfrom; /* from = min(from, pfrom) */
+ if (pl > l) l = pl; /* l = max(l, pl) */
+ SETARG_A(*previous, from);
+ SETARG_B(*previous, l - from);
+ return;
+ }
+ } /* else go through */
+ }
+ luaK_codeABC(fs, OP_LOADNIL, from, n - 1, 0); /* else no optimization */
+}
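+
+/*
+** Worked example (illustration only, not part of the upstream sources):
+** for 'local a; local b; local c' (with no jump targets in between) the
+** three calls luaK_nil(fs, 0, 1), luaK_nil(fs, 1, 1) and luaK_nil(fs, 2, 1)
+** collapse into a single LOADNIL 0 2 instruction that clears registers
+** 0 through 2.
+*/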
+
+
+/*
+** Gets the destination address of a jump instruction. Used to traverse
+** a list of jumps.
+*/
+static int getjump (FuncState *fs, int pc) {
+ int offset = GETARG_sBx(fs->f->code[pc]);
+ if (offset == NO_JUMP) /* a jump to itself marks the end of the list */
+ return NO_JUMP; /* end of list */
+ else
+ return (pc+1)+offset; /* turn offset into absolute position */
+}
+
+
+/*
+** Fix jump instruction at position 'pc' to jump to 'dest'.
+** (Jump addresses are relative in Lua)
+*/
+static void fixjump (FuncState *fs, int pc, int dest) {
+ Instruction *jmp = &fs->f->code[pc];
+ int offset = dest - (pc + 1);
+ lua_assert(dest != NO_JUMP);
+ if (abs(offset) > MAXARG_sBx)
+ luaX_syntaxerror(fs->ls, "control structure too long");
+ SETARG_sBx(*jmp, offset);
+}
+
+
+/*
+** Concatenate jump-list 'l2' into jump-list 'l1'
+*/
+void luaK_concat (FuncState *fs, int *l1, int l2) {
+ if (l2 == NO_JUMP) return; /* nothing to concatenate? */
+ else if (*l1 == NO_JUMP) /* no original list? */
+ *l1 = l2; /* 'l1' points to 'l2' */
+ else {
+ int list = *l1;
+ int next;
+ while ((next = getjump(fs, list)) != NO_JUMP) /* find last element */
+ list = next;
+ fixjump(fs, list, l2); /* last element links to 'l2' */
+ }
+}
+
+
+/*
+** Create a jump instruction and return its position, so its destination
+** can be fixed later (with 'fixjump'). If there are jumps to
+** this position (kept in 'jpc'), link them all together so that
+** 'patchlistaux' will fix them all directly to the final destination.
+*/
+int luaK_jump (FuncState *fs) {
+ int jpc = fs->jpc; /* save list of jumps to here */
+ int j;
+ fs->jpc = NO_JUMP; /* no more jumps to here */
+ j = luaK_codeAsBx(fs, OP_JMP, 0, NO_JUMP);
+ luaK_concat(fs, &j, jpc); /* keep them on hold */
+ return j;
+}
+
+
+/*
+** Code a 'return' instruction
+*/
+void luaK_ret (FuncState *fs, int first, int nret) {
+ luaK_codeABC(fs, OP_RETURN, first, nret+1, 0);
+}
+
+
+/*
+** Code a "conditional jump", that is, a test or comparison opcode
+** followed by a jump. Return jump position.
+*/
+static int condjump (FuncState *fs, OpCode op, int A, int B, int C) {
+ luaK_codeABC(fs, op, A, B, C);
+ return luaK_jump(fs);
+}
+
+
+/*
+** returns current 'pc' and marks it as a jump target (to avoid wrong
+** optimizations with consecutive instructions not in the same basic block).
+*/
+int luaK_getlabel (FuncState *fs) {
+ fs->lasttarget = fs->pc;
+ return fs->pc;
+}
+
+
+/*
+** Returns the position of the instruction "controlling" a given
+** jump (that is, its condition), or the jump itself if it is
+** unconditional.
+*/
+static Instruction *getjumpcontrol (FuncState *fs, int pc) {
+ Instruction *pi = &fs->f->code[pc];
+ if (pc >= 1 && testTMode(GET_OPCODE(*(pi-1))))
+ return pi-1;
+ else
+ return pi;
+}
+
+
+/*
+** Patch destination register for a TESTSET instruction.
+** If instruction in position 'node' is not a TESTSET, return 0 ("fails").
+** Otherwise, if 'reg' is not 'NO_REG', set it as the destination
+** register. Otherwise, change instruction to a simple 'TEST' (produces
+** no register value)
+*/
+static int patchtestreg (FuncState *fs, int node, int reg) {
+ Instruction *i = getjumpcontrol(fs, node);
+ if (GET_OPCODE(*i) != OP_TESTSET)
+ return 0; /* cannot patch other instructions */
+ if (reg != NO_REG && reg != GETARG_B(*i))
+ SETARG_A(*i, reg);
+ else {
+ /* no register to put value or register already has the value;
+ change instruction to simple test */
+ *i = CREATE_ABC(OP_TEST, GETARG_B(*i), 0, GETARG_C(*i));
+ }
+ return 1;
+}
+
+
+/*
+** Traverse a list of tests, ensuring that none of them produces a value.
+*/
+static void removevalues (FuncState *fs, int list) {
+ for (; list != NO_JUMP; list = getjump(fs, list))
+ patchtestreg(fs, list, NO_REG);
+}
+
+
+/*
+** Traverse a list of tests, patching their destination address and
+** registers: tests producing values jump to 'vtarget' (and put their
+** values in 'reg'), other tests jump to 'dtarget'.
+*/
+static void patchlistaux (FuncState *fs, int list, int vtarget, int reg,
+ int dtarget) {
+ while (list != NO_JUMP) {
+ int next = getjump(fs, list);
+ if (patchtestreg(fs, list, reg))
+ fixjump(fs, list, vtarget);
+ else
+ fixjump(fs, list, dtarget); /* jump to default target */
+ list = next;
+ }
+}
+
+
+/*
+** Ensure all pending jumps to current position are fixed (jumping
+** to current position with no values) and reset list of pending
+** jumps
+*/
+static void dischargejpc (FuncState *fs) {
+ patchlistaux(fs, fs->jpc, fs->pc, NO_REG, fs->pc);
+ fs->jpc = NO_JUMP;
+}
+
+
+/*
+** Add elements in 'list' to list of pending jumps to "here"
+** (current position)
+*/
+void luaK_patchtohere (FuncState *fs, int list) {
+ luaK_getlabel(fs); /* mark "here" as a jump target */
+ luaK_concat(fs, &fs->jpc, list);
+}
+
+
+/*
+** Patch all jumps in 'list' to jump to 'target'.
+** (The assert means that we cannot fix a jump to a forward address
+** because we only know addresses once code is generated.)
+*/
+void luaK_patchlist (FuncState *fs, int list, int target) {
+ if (target == fs->pc) /* 'target' is current position? */
+ luaK_patchtohere(fs, list); /* add list to pending jumps */
+ else {
+ lua_assert(target < fs->pc);
+ patchlistaux(fs, list, target, NO_REG, target);
+ }
+}
+
+
+/*
+** Patch all jumps in 'list' to close upvalues up to the given 'level'.
+** (The assertion checks that jumps either were closing nothing
+** or were closing higher levels, from inner blocks.)
+*/
+void luaK_patchclose (FuncState *fs, int list, int level) {
+ level++; /* argument is +1 to reserve 0 as non-op */
+ for (; list != NO_JUMP; list = getjump(fs, list)) {
+ lua_assert(GET_OPCODE(fs->f->code[list]) == OP_JMP &&
+ (GETARG_A(fs->f->code[list]) == 0 ||
+ GETARG_A(fs->f->code[list]) >= level));
+ SETARG_A(fs->f->code[list], level);
+ }
+}
+
+
+/*
+** Emit instruction 'i', checking for array sizes and saving also its
+** line information. Return 'i' position.
+*/
+static int luaK_code (FuncState *fs, Instruction i) {
+ Proto *f = fs->f;
+ dischargejpc(fs); /* 'pc' will change */
+ /* put new instruction in code array */
+ luaM_growvector(fs->ls->L, f->code, fs->pc, f->sizecode, Instruction,
+ MAX_INT, "opcodes");
+ f->code[fs->pc] = i;
+ /* save corresponding line information */
+ luaM_growvector(fs->ls->L, f->lineinfo, fs->pc, f->sizelineinfo, int,
+ MAX_INT, "opcodes");
+ f->lineinfo[fs->pc] = fs->ls->lastline;
+ return fs->pc++;
+}
+
+
+/*
+** Format and emit an 'iABC' instruction. (Assertions check consistency
+** of parameters versus opcode.)
+*/
+int luaK_codeABC (FuncState *fs, OpCode o, int a, int b, int c) {
+ lua_assert(getOpMode(o) == iABC);
+ lua_assert(getBMode(o) != OpArgN || b == 0);
+ lua_assert(getCMode(o) != OpArgN || c == 0);
+ lua_assert(a <= MAXARG_A && b <= MAXARG_B && c <= MAXARG_C);
+ return luaK_code(fs, CREATE_ABC(o, a, b, c));
+}
+
+
+/*
+** Format and emit an 'iABx' instruction.
+*/
+int luaK_codeABx (FuncState *fs, OpCode o, int a, unsigned int bc) {
+ lua_assert(getOpMode(o) == iABx || getOpMode(o) == iAsBx);
+ lua_assert(getCMode(o) == OpArgN);
+ lua_assert(a <= MAXARG_A && bc <= MAXARG_Bx);
+ return luaK_code(fs, CREATE_ABx(o, a, bc));
+}
+
+
+/*
+** Emit an "extra argument" instruction (format 'iAx')
+*/
+static int codeextraarg (FuncState *fs, int a) {
+ lua_assert(a <= MAXARG_Ax);
+ return luaK_code(fs, CREATE_Ax(OP_EXTRAARG, a));
+}
+
+
+/*
+** Emit a "load constant" instruction, using either 'OP_LOADK'
+** (if constant index 'k' fits in 18 bits) or an 'OP_LOADKX'
+** instruction with "extra argument".
+*/
+int luaK_codek (FuncState *fs, int reg, int k) {
+ if (k <= MAXARG_Bx)
+ return luaK_codeABx(fs, OP_LOADK, reg, k);
+ else {
+ int p = luaK_codeABx(fs, OP_LOADKX, reg, 0);
+ codeextraarg(fs, k);
+ return p;
+ }
+}
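+
+
+/*
+** For illustration: with the default instruction layout, 'Bx' has 18
+** bits, so constant indices up to MAXARG_Bx (262143) fit directly in
+** OP_LOADK; a larger index k is emitted roughly as
+**
+**   LOADKX   reg
+**   EXTRAARG k          ; k carried in the 26-bit 'Ax' field
+*/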
+
+
+/*
+** Check register-stack level, keeping track of its maximum size
+** in field 'maxstacksize'
+*/
+void luaK_checkstack (FuncState *fs, int n) {
+ int newstack = fs->freereg + n;
+ if (newstack > fs->f->maxstacksize) {
+ if (newstack >= MAXREGS)
+ luaX_syntaxerror(fs->ls,
+ "function or expression needs too many registers");
+ fs->f->maxstacksize = cast_byte(newstack);
+ }
+}
+
+
+/*
+** Reserve 'n' registers in register stack
+*/
+void luaK_reserveregs (FuncState *fs, int n) {
+ luaK_checkstack(fs, n);
+ fs->freereg += n;
+}
+
+
+/*
+** Free register 'reg', if it is neither a constant index nor
+** a local variable.
+*/
+static void freereg (FuncState *fs, int reg) {
+ if (!ISK(reg) && reg >= fs->nactvar) {
+ fs->freereg--;
+ lua_assert(reg == fs->freereg);
+ }
+}
+
+
+/*
+** Free register used by expression 'e' (if any)
+*/
+static void freeexp (FuncState *fs, expdesc *e) {
+ if (e->k == VNONRELOC)
+ freereg(fs, e->u.info);
+}
+
+
+/*
+** Free registers used by expressions 'e1' and 'e2' (if any) in proper
+** order.
+*/
+static void freeexps (FuncState *fs, expdesc *e1, expdesc *e2) {
+ int r1 = (e1->k == VNONRELOC) ? e1->u.info : -1;
+ int r2 = (e2->k == VNONRELOC) ? e2->u.info : -1;
+ if (r1 > r2) {
+ freereg(fs, r1);
+ freereg(fs, r2);
+ }
+ else {
+ freereg(fs, r2);
+ freereg(fs, r1);
+ }
+}
+
+
+/*
+** Add constant 'v' to prototype's list of constants (field 'k').
+** Use scanner's table to cache position of constants in constant list
+** and try to reuse constants. Because some values should not be used
+** as keys (nil cannot be a key, integer keys can collapse with float
+** keys), the caller must provide a useful 'key' for indexing the cache.
+*/
+static int addk (FuncState *fs, TValue *key, TValue *v) {
+ lua_State *L = fs->ls->L;
+ Proto *f = fs->f;
+ TValue *idx = luaH_set(L, fs->ls->h, key); /* index scanner table */
+ int k, oldsize;
+ if (ttisinteger(idx)) { /* is there an index there? */
+ k = cast_int(ivalue(idx));
+ /* correct value? (warning: must distinguish floats from integers!) */
+ if (k < fs->nk && ttype(&f->k[k]) == ttype(v) &&
+ luaV_rawequalobj(&f->k[k], v))
+ return k; /* reuse index */
+ }
+ /* constant not found; create a new entry */
+ oldsize = f->sizek;
+ k = fs->nk;
+ /* numerical value does not need GC barrier;
+ table has no metatable, so it does not need to invalidate cache */
+ setivalue(idx, k);
+ luaM_growvector(L, f->k, k, f->sizek, TValue, MAXARG_Ax, "constants");
+ while (oldsize < f->sizek) setnilvalue(&f->k[oldsize++]);
+ setobj(L, &f->k[k], v);
+ fs->nk++;
+ luaC_barrier(L, f, v);
+ return k;
+}
+
+
+/*
+** Add a string to list of constants and return its index.
+*/
+int luaK_stringK (FuncState *fs, TString *s) {
+ TValue o;
+ setsvalue(fs->ls->L, &o, s);
+ return addk(fs, &o, &o); /* use string itself as key */
+}
+
+
+/*
+** Add an integer to list of constants and return its index.
+** Integers use userdata as keys to avoid collision with floats with
+** same value; conversion to 'void*' is used only for hashing, so there
+** are no "precision" problems.
+*/
+int luaK_intK (FuncState *fs, lua_Integer n) {
+ TValue k, o;
+ setpvalue(&k, cast(void*, cast(size_t, n)));
+ setivalue(&o, n);
+ return addk(fs, &k, &o);
+}
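+
+
+/*
+** Example of the collision being avoided: as table keys, the integer 1
+** and the float 1.0 are the same key, so caching both under the value
+** itself would make them share one constant slot. Keying integers by a
+** light-userdata pointer keeps the two cache entries distinct, while
+** the stored constant ('o' above) remains a true integer.
+*/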
+
+/*
+** Add a float to list of constants and return its index.
+*/
+static int luaK_numberK (FuncState *fs, lua_Number r) {
+ TValue o;
+ setfltvalue(&o, r);
+ return addk(fs, &o, &o); /* use number itself as key */
+}
+
+
+/*
+** Add a boolean to list of constants and return its index.
+*/
+static int boolK (FuncState *fs, int b) {
+ TValue o;
+ setbvalue(&o, b);
+ return addk(fs, &o, &o); /* use boolean itself as key */
+}
+
+
+/*
+** Add nil to list of constants and return its index.
+*/
+static int nilK (FuncState *fs) {
+ TValue k, v;
+ setnilvalue(&v);
+ /* cannot use nil as key; instead use table itself to represent nil */
+ sethvalue(fs->ls->L, &k, fs->ls->h);
+ return addk(fs, &k, &v);
+}
+
+
+/*
+** Fix an expression to return the number of results 'nresults'.
+** Either 'e' is a multi-ret expression (function call or vararg)
+** or 'nresults' is LUA_MULTRET (as any expression can satisfy that).
+*/
+void luaK_setreturns (FuncState *fs, expdesc *e, int nresults) {
+ if (e->k == VCALL) { /* expression is an open function call? */
+ SETARG_C(getinstruction(fs, e), nresults + 1);
+ }
+ else if (e->k == VVARARG) {
+ Instruction *pc = &getinstruction(fs, e);
+ SETARG_B(*pc, nresults + 1);
+ SETARG_A(*pc, fs->freereg);
+ luaK_reserveregs(fs, 1);
+ }
+ else lua_assert(nresults == LUA_MULTRET);
+}
+
+
+/*
+** Fix an expression to return one result.
+** If expression is not a multi-ret expression (function call or
+** vararg), it already returns one result, so nothing needs to be done.
+** Function calls become VNONRELOC expressions (as their result comes
+** fixed in the base register of the call), while vararg expressions
+** become VRELOCABLE (as OP_VARARG puts its results where it wants).
+** (Calls are created returning one result, so that does not need
+** to be fixed.)
+*/
+void luaK_setoneret (FuncState *fs, expdesc *e) {
+ if (e->k == VCALL) { /* expression is an open function call? */
+ /* already returns 1 value */
+ lua_assert(GETARG_C(getinstruction(fs, e)) == 2);
+ e->k = VNONRELOC; /* result has fixed position */
+ e->u.info = GETARG_A(getinstruction(fs, e));
+ }
+ else if (e->k == VVARARG) {
+ SETARG_B(getinstruction(fs, e), 2);
+ e->k = VRELOCABLE; /* can relocate its simple result */
+ }
+}
+
+
+/*
+** Ensure that expression 'e' is not a variable.
+*/
+void luaK_dischargevars (FuncState *fs, expdesc *e) {
+ switch (e->k) {
+ case VLOCAL: { /* already in a register */
+ e->k = VNONRELOC; /* becomes a non-relocatable value */
+ break;
+ }
+ case VUPVAL: { /* move value to some (pending) register */
+ e->u.info = luaK_codeABC(fs, OP_GETUPVAL, 0, e->u.info, 0);
+ e->k = VRELOCABLE;
+ break;
+ }
+ case VINDEXED: {
+ OpCode op;
+ freereg(fs, e->u.ind.idx);
+ if (e->u.ind.vt == VLOCAL) { /* is 't' in a register? */
+ freereg(fs, e->u.ind.t);
+ op = OP_GETTABLE;
+ }
+ else {
+ lua_assert(e->u.ind.vt == VUPVAL);
+ op = OP_GETTABUP; /* 't' is in an upvalue */
+ }
+ e->u.info = luaK_codeABC(fs, op, 0, e->u.ind.t, e->u.ind.idx);
+ e->k = VRELOCABLE;
+ break;
+ }
+ case VVARARG: case VCALL: {
+ luaK_setoneret(fs, e);
+ break;
+ }
+ default: break; /* there is one value available (somewhere) */
+ }
+}
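+
+
+/*
+** Example of the VINDEXED/VUPVAL split: a global such as 'print' is
+** really '_ENV.print', i.e. an index into the _ENV upvalue, so loading
+** it emits roughly
+**
+**   GETTABUP 0 0 -1     ; R0 := _ENV["print"] (-1 = constant "print")
+*/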
+
+
+/*
+** Ensures expression value is in register 'reg' (and therefore
+** 'e' will become a non-relocatable expression).
+*/
+static void discharge2reg (FuncState *fs, expdesc *e, int reg) {
+ luaK_dischargevars(fs, e);
+ switch (e->k) {
+ case VNIL: {
+ luaK_nil(fs, reg, 1);
+ break;
+ }
+ case VFALSE: case VTRUE: {
+ luaK_codeABC(fs, OP_LOADBOOL, reg, e->k == VTRUE, 0);
+ break;
+ }
+ case VK: {
+ luaK_codek(fs, reg, e->u.info);
+ break;
+ }
+ case VKFLT: {
+ luaK_codek(fs, reg, luaK_numberK(fs, e->u.nval));
+ break;
+ }
+ case VKINT: {
+ luaK_codek(fs, reg, luaK_intK(fs, e->u.ival));
+ break;
+ }
+ case VRELOCABLE: {
+ Instruction *pc = &getinstruction(fs, e);
+ SETARG_A(*pc, reg); /* instruction will put result in 'reg' */
+ break;
+ }
+ case VNONRELOC: {
+ if (reg != e->u.info)
+ luaK_codeABC(fs, OP_MOVE, reg, e->u.info, 0);
+ break;
+ }
+ default: {
+ lua_assert(e->k == VJMP);
+ return; /* nothing to do... */
+ }
+ }
+ e->u.info = reg;
+ e->k = VNONRELOC;
+}
+
+
+/*
+** Ensures expression value is in any register.
+*/
+static void discharge2anyreg (FuncState *fs, expdesc *e) {
+ if (e->k != VNONRELOC) { /* no fixed register yet? */
+ luaK_reserveregs(fs, 1); /* get a register */
+ discharge2reg(fs, e, fs->freereg-1); /* put value there */
+ }
+}
+
+
+static int code_loadbool (FuncState *fs, int A, int b, int jump) {
+ luaK_getlabel(fs); /* those instructions may be jump targets */
+ return luaK_codeABC(fs, OP_LOADBOOL, A, b, jump);
+}
+
+
+/*
+** Check whether the list has any jump that does not produce a value
+** (or that produces an inverted value).
+*/
+static int need_value (FuncState *fs, int list) {
+ for (; list != NO_JUMP; list = getjump(fs, list)) {
+ Instruction i = *getjumpcontrol(fs, list);
+ if (GET_OPCODE(i) != OP_TESTSET) return 1;
+ }
+ return 0; /* not found */
+}
+
+
+/*
+** Ensures final expression result (including results from its jump
+** lists) is in register 'reg'.
+** If the expression has jumps, those jumps are patched either to its
+** final position or to "load" instructions (for those tests that do
+** not produce values).
+*/
+static void exp2reg (FuncState *fs, expdesc *e, int reg) {
+ discharge2reg(fs, e, reg);
+ if (e->k == VJMP) /* expression itself is a test? */
+ luaK_concat(fs, &e->t, e->u.info); /* put this jump in 't' list */
+ if (hasjumps(e)) {
+ int final; /* position after whole expression */
+ int p_f = NO_JUMP; /* position of an eventual LOAD false */
+ int p_t = NO_JUMP; /* position of an eventual LOAD true */
+ if (need_value(fs, e->t) || need_value(fs, e->f)) {
+ int fj = (e->k == VJMP) ? NO_JUMP : luaK_jump(fs);
+ p_f = code_loadbool(fs, reg, 0, 1);
+ p_t = code_loadbool(fs, reg, 1, 0);
+ luaK_patchtohere(fs, fj);
+ }
+ final = luaK_getlabel(fs);
+ patchlistaux(fs, e->f, final, reg, p_f);
+ patchlistaux(fs, e->t, final, reg, p_t);
+ }
+ e->f = e->t = NO_JUMP;
+ e->u.info = reg;
+ e->k = VNONRELOC;
+}
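+
+
+/*
+** Rough example of the code produced for 'local c = a < b' (with 'a',
+** 'b', 'c' in registers 0..2): the comparison is a VJMP, so the two
+** LOADBOOL instructions above materialize its boolean result:
+**
+**   LT       1 0 1      ; skip the jump when false, take it when true
+**   JMP      (to the second LOADBOOL)
+**   LOADBOOL 2 0 1      ; c = false; skip next instruction
+**   LOADBOOL 2 1 0      ; c = true
+*/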
+
+
+/*
+** Ensures final expression result (including results from its jump
+** lists) is in next available register.
+*/
+void luaK_exp2nextreg (FuncState *fs, expdesc *e) {
+ luaK_dischargevars(fs, e);
+ freeexp(fs, e);
+ luaK_reserveregs(fs, 1);
+ exp2reg(fs, e, fs->freereg - 1);
+}
+
+
+/*
+** Ensures final expression result (including results from its jump
+** lists) is in some (any) register and return that register.
+*/
+int luaK_exp2anyreg (FuncState *fs, expdesc *e) {
+ luaK_dischargevars(fs, e);
+ if (e->k == VNONRELOC) { /* expression already has a register? */
+ if (!hasjumps(e)) /* no jumps? */
+ return e->u.info; /* result is already in a register */
+ if (e->u.info >= fs->nactvar) { /* reg. is not a local? */
+ exp2reg(fs, e, e->u.info); /* put final result in it */
+ return e->u.info;
+ }
+ }
+ luaK_exp2nextreg(fs, e); /* otherwise, use next available register */
+ return e->u.info;
+}
+
+
+/*
+** Ensures final expression result is either in a register or in an
+** upvalue.
+*/
+void luaK_exp2anyregup (FuncState *fs, expdesc *e) {
+ if (e->k != VUPVAL || hasjumps(e))
+ luaK_exp2anyreg(fs, e);
+}
+
+
+/*
+** Ensures final expression result is either in a register or it is
+** a constant.
+*/
+void luaK_exp2val (FuncState *fs, expdesc *e) {
+ if (hasjumps(e))
+ luaK_exp2anyreg(fs, e);
+ else
+ luaK_dischargevars(fs, e);
+}
+
+
+/*
+** Ensures final expression result is in a valid R/K index
+** (that is, it is either in a register or in 'k' with an index
+** in the range of R/K indices).
+** Returns R/K index.
+*/
+int luaK_exp2RK (FuncState *fs, expdesc *e) {
+ luaK_exp2val(fs, e);
+ switch (e->k) { /* move constants to 'k' */
+ case VTRUE: e->u.info = boolK(fs, 1); goto vk;
+ case VFALSE: e->u.info = boolK(fs, 0); goto vk;
+ case VNIL: e->u.info = nilK(fs); goto vk;
+ case VKINT: e->u.info = luaK_intK(fs, e->u.ival); goto vk;
+ case VKFLT: e->u.info = luaK_numberK(fs, e->u.nval); goto vk;
+ case VK:
+ vk:
+ e->k = VK;
+ if (e->u.info <= MAXINDEXRK) /* constant fits in 'argC'? */
+ return RKASK(e->u.info);
+ else break;
+ default: break;
+ }
+ /* not a constant in the right range: put it in a register */
+ return luaK_exp2anyreg(fs, e);
+}
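+
+
+/*
+** Sketch of the R/K encoding used here (macros from lopcodes.h): an
+** argument with bit BITRK set denotes a constant index, otherwise a
+** register, e.g.
+**
+**   RKASK(5) == 5 | BITRK     constant k[5]
+**   ISK(x)                    nonzero only when BITRK is set
+**   INDEXK(x) == x & ~BITRK   recover the constant index
+**
+** MAXINDEXRK (BITRK - 1) is thus the largest constant index that fits.
+*/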
+
+
+/*
+** Generate code to store result of expression 'ex' into variable 'var'.
+*/
+void luaK_storevar (FuncState *fs, expdesc *var, expdesc *ex) {
+ switch (var->k) {
+ case VLOCAL: {
+ freeexp(fs, ex);
+ exp2reg(fs, ex, var->u.info); /* compute 'ex' into proper place */
+ return;
+ }
+ case VUPVAL: {
+ int e = luaK_exp2anyreg(fs, ex);
+ luaK_codeABC(fs, OP_SETUPVAL, e, var->u.info, 0);
+ break;
+ }
+ case VINDEXED: {
+ OpCode op = (var->u.ind.vt == VLOCAL) ? OP_SETTABLE : OP_SETTABUP;
+ int e = luaK_exp2RK(fs, ex);
+ luaK_codeABC(fs, op, var->u.ind.t, var->u.ind.idx, e);
+ break;
+ }
+ default: lua_assert(0); /* invalid var kind to store */
+ }
+ freeexp(fs, ex);
+}
+
+
+/*
+** Emit SELF instruction: prepare the method call 'e:key(...)' by
+** placing the function 'e[key]' and 'e' itself in consecutive registers.
+*/
+void luaK_self (FuncState *fs, expdesc *e, expdesc *key) {
+ int ereg;
+ luaK_exp2anyreg(fs, e);
+ ereg = e->u.info; /* register where 'e' was placed */
+ freeexp(fs, e);
+ e->u.info = fs->freereg; /* base register for op_self */
+ e->k = VNONRELOC; /* self expression has a fixed register */
+ luaK_reserveregs(fs, 2); /* function and 'self' produced by op_self */
+ luaK_codeABC(fs, OP_SELF, e->u.info, ereg, luaK_exp2RK(fs, key));
+ freeexp(fs, key);
+}
+
+
+/*
+** Negate condition 'e' (where 'e' is a comparison).
+*/
+static void negatecondition (FuncState *fs, expdesc *e) {
+ Instruction *pc = getjumpcontrol(fs, e->u.info);
+ lua_assert(testTMode(GET_OPCODE(*pc)) && GET_OPCODE(*pc) != OP_TESTSET &&
+ GET_OPCODE(*pc) != OP_TEST);
+ SETARG_A(*pc, !(GETARG_A(*pc)));
+}
+
+
+/*
+** Emit instruction to jump if 'e' is 'cond' (that is, if 'cond'
+** is true, code will jump if 'e' is true.) Return jump position.
+** Optimize when 'e' is 'not' something, inverting the condition
+** and removing the 'not'.
+*/
+static int jumponcond (FuncState *fs, expdesc *e, int cond) {
+ if (e->k == VRELOCABLE) {
+ Instruction ie = getinstruction(fs, e);
+ if (GET_OPCODE(ie) == OP_NOT) {
+ fs->pc--; /* remove previous OP_NOT */
+ return condjump(fs, OP_TEST, GETARG_B(ie), 0, !cond);
+ }
+ /* else go through */
+ }
+ discharge2anyreg(fs, e);
+ freeexp(fs, e);
+ return condjump(fs, OP_TESTSET, NO_REG, e->u.info, cond);
+}
+
+
+/*
+** Emit code to go through if 'e' is true, jump otherwise.
+*/
+void luaK_goiftrue (FuncState *fs, expdesc *e) {
+ int pc; /* pc of new jump */
+ luaK_dischargevars(fs, e);
+ switch (e->k) {
+ case VJMP: { /* condition? */
+ negatecondition(fs, e); /* jump when it is false */
+ pc = e->u.info; /* save jump position */
+ break;
+ }
+ case VK: case VKFLT: case VKINT: case VTRUE: {
+ pc = NO_JUMP; /* always true; do nothing */
+ break;
+ }
+ default: {
+ pc = jumponcond(fs, e, 0); /* jump when false */
+ break;
+ }
+ }
+ luaK_concat(fs, &e->f, pc); /* insert new jump in false list */
+ luaK_patchtohere(fs, e->t); /* true list jumps to here (to go through) */
+ e->t = NO_JUMP;
+}
+
+
+/*
+** Emit code to go through if 'e' is false, jump otherwise.
+*/
+void luaK_goiffalse (FuncState *fs, expdesc *e) {
+ int pc; /* pc of new jump */
+ luaK_dischargevars(fs, e);
+ switch (e->k) {
+ case VJMP: {
+ pc = e->u.info; /* already jump if true */
+ break;
+ }
+ case VNIL: case VFALSE: {
+ pc = NO_JUMP; /* always false; do nothing */
+ break;
+ }
+ default: {
+ pc = jumponcond(fs, e, 1); /* jump if true */
+ break;
+ }
+ }
+ luaK_concat(fs, &e->t, pc); /* insert new jump in 't' list */
+ luaK_patchtohere(fs, e->f); /* false list jumps to here (to go through) */
+ e->f = NO_JUMP;
+}
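+
+
+/*
+** For illustration, 'local c = a and b' (locals in registers 0..2)
+** compiles roughly to
+**
+**   TESTSET 2 0 0    ; if a is true, skip the jump; else c = a
+**   JMP    (past the MOVE)
+**   MOVE   2 1       ; c = b
+**
+** 'luaK_goiftrue' creates the TESTSET/JMP pair; the jump is patched to
+** the end of the whole expression when its value is finally stored.
+*/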
+
+
+/*
+** Code 'not e', doing constant folding.
+*/
+static void codenot (FuncState *fs, expdesc *e) {
+ luaK_dischargevars(fs, e);
+ switch (e->k) {
+ case VNIL: case VFALSE: {
+ e->k = VTRUE; /* true == not nil == not false */
+ break;
+ }
+ case VK: case VKFLT: case VKINT: case VTRUE: {
+ e->k = VFALSE; /* false == not "x" == not 0.5 == not 1 == not true */
+ break;
+ }
+ case VJMP: {
+ negatecondition(fs, e);
+ break;
+ }
+ case VRELOCABLE:
+ case VNONRELOC: {
+ discharge2anyreg(fs, e);
+ freeexp(fs, e);
+ e->u.info = luaK_codeABC(fs, OP_NOT, 0, e->u.info, 0);
+ e->k = VRELOCABLE;
+ break;
+ }
+ default: lua_assert(0); /* cannot happen */
+ }
+ /* interchange true and false lists */
+ { int temp = e->f; e->f = e->t; e->t = temp; }
+ removevalues(fs, e->f); /* values are useless when negated */
+ removevalues(fs, e->t);
+}
+
+
+/*
+** Create expression 't[k]'. 't' must have its final result already in a
+** register or upvalue.
+*/
+void luaK_indexed (FuncState *fs, expdesc *t, expdesc *k) {
+ lua_assert(!hasjumps(t) && (vkisinreg(t->k) || t->k == VUPVAL));
+ t->u.ind.t = t->u.info; /* register or upvalue index */
+ t->u.ind.idx = luaK_exp2RK(fs, k); /* R/K index for key */
+ t->u.ind.vt = (t->k == VUPVAL) ? VUPVAL : VLOCAL;
+ t->k = VINDEXED;
+}
+
+
+/*
+** Return false if folding can raise an error.
+** Bitwise operations need operands convertible to integers; division
+** operations cannot have 0 as divisor.
+*/
+static int validop (int op, TValue *v1, TValue *v2) {
+ switch (op) {
+ case LUA_OPBAND: case LUA_OPBOR: case LUA_OPBXOR:
+ case LUA_OPSHL: case LUA_OPSHR: case LUA_OPBNOT: { /* conversion errors */
+ lua_Integer i;
+ return (tointeger(v1, &i) && tointeger(v2, &i));
+ }
+ case LUA_OPDIV: case LUA_OPIDIV: case LUA_OPMOD: /* division by 0 */
+ return (nvalue(v2) != 0);
+ default: return 1; /* everything else is valid */
+ }
+}
+
+
+/*
+** Try to "constant-fold" an operation; return 1 iff successful.
+** (In this case, 'e1' has the final result.)
+*/
+static int constfolding (FuncState *fs, int op, expdesc *e1,
+ const expdesc *e2) {
+ TValue v1, v2, res;
+ if (!tonumeral(e1, &v1) || !tonumeral(e2, &v2) || !validop(op, &v1, &v2))
+ return 0; /* non-numeric operands or not safe to fold */
+ luaO_arith(fs->ls->L, op, &v1, &v2, &res); /* does operation */
+ if (ttisinteger(&res)) {
+ e1->k = VKINT;
+ e1->u.ival = ivalue(&res);
+ }
+ else { /* folds neither NaN nor 0.0 (to avoid problems with -0.0) */
+ lua_Number n = fltvalue(&res);
+ if (luai_numisnan(n) || n == 0)
+ return 0;
+ e1->k = VKFLT;
+ e1->u.nval = n;
+ }
+ return 1;
+}
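+
+
+/*
+** Examples of the rules above: '2 + 3' folds to the integer constant 5
+** and '2 * 0.5' folds to the float 1.0, while '1 // 0' is left for
+** runtime (rejected by 'validop') and '-0.0' is not folded because the
+** folded result would be a zero float.
+*/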
+
+
+/*
+** Emit code for unary expressions that "produce values"
+** (everything but 'not').
+** Expression to produce final result will be encoded in 'e'.
+*/
+static void codeunexpval (FuncState *fs, OpCode op, expdesc *e, int line) {
+ int r = luaK_exp2anyreg(fs, e); /* opcodes operate only on registers */
+ freeexp(fs, e);
+ e->u.info = luaK_codeABC(fs, op, 0, r, 0); /* generate opcode */
+ e->k = VRELOCABLE; /* all those operations are relocatable */
+ luaK_fixline(fs, line);
+}
+
+
+/*
+** Emit code for binary expressions that "produce values"
+** (everything but logical operators 'and'/'or' and comparison
+** operators).
+** Expression to produce final result will be encoded in 'e1'.
+** Because 'luaK_exp2RK' can free registers, its calls must be
+** in "stack order" (that is, first on 'e2', which may have more
+** recent registers to be released).
+*/
+static void codebinexpval (FuncState *fs, OpCode op,
+ expdesc *e1, expdesc *e2, int line) {
+ int rk2 = luaK_exp2RK(fs, e2); /* both operands are "RK" */
+ int rk1 = luaK_exp2RK(fs, e1);
+ freeexps(fs, e1, e2);
+ e1->u.info = luaK_codeABC(fs, op, 0, rk1, rk2); /* generate opcode */
+ e1->k = VRELOCABLE; /* all those operations are relocatable */
+ luaK_fixline(fs, line);
+}
+
+
+/*
+** Emit code for comparisons.
+** 'e1' was already put in R/K form by 'luaK_infix'.
+*/
+static void codecomp (FuncState *fs, BinOpr opr, expdesc *e1, expdesc *e2) {
+ int rk1 = (e1->k == VK) ? RKASK(e1->u.info)
+ : check_exp(e1->k == VNONRELOC, e1->u.info);
+ int rk2 = luaK_exp2RK(fs, e2);
+ freeexps(fs, e1, e2);
+ switch (opr) {
+ case OPR_NE: { /* '(a ~= b)' ==> 'not (a == b)' */
+ e1->u.info = condjump(fs, OP_EQ, 0, rk1, rk2);
+ break;
+ }
+ case OPR_GT: case OPR_GE: {
+ /* '(a > b)' ==> '(b < a)'; '(a >= b)' ==> '(b <= a)' */
+ OpCode op = cast(OpCode, (opr - OPR_NE) + OP_EQ);
+ e1->u.info = condjump(fs, op, 1, rk2, rk1); /* invert operands */
+ break;
+ }
+ default: { /* '==', '<', '<=' use their own opcodes */
+ OpCode op = cast(OpCode, (opr - OPR_EQ) + OP_EQ);
+ e1->u.info = condjump(fs, op, 1, rk1, rk2);
+ break;
+ }
+ }
+ e1->k = VJMP;
+}
+
+
+/*
+** Apply prefix operation 'op' to expression 'e'.
+*/
+void luaK_prefix (FuncState *fs, UnOpr op, expdesc *e, int line) {
+ static const expdesc ef = {VKINT, {0}, NO_JUMP, NO_JUMP};
+ switch (op) {
+ case OPR_MINUS: case OPR_BNOT: /* use 'ef' as fake 2nd operand */
+ if (constfolding(fs, op + LUA_OPUNM, e, &ef))
+ break;
+ /* FALLTHROUGH */
+ case OPR_LEN:
+ codeunexpval(fs, cast(OpCode, op + OP_UNM), e, line);
+ break;
+ case OPR_NOT: codenot(fs, e); break;
+ default: lua_assert(0);
+ }
+}
+
+
+/*
+** Process 1st operand 'v' of binary operation 'op' before reading
+** 2nd operand.
+*/
+void luaK_infix (FuncState *fs, BinOpr op, expdesc *v) {
+ switch (op) {
+ case OPR_AND: {
+ luaK_goiftrue(fs, v); /* go ahead only if 'v' is true */
+ break;
+ }
+ case OPR_OR: {
+ luaK_goiffalse(fs, v); /* go ahead only if 'v' is false */
+ break;
+ }
+ case OPR_CONCAT: {
+ luaK_exp2nextreg(fs, v); /* operand must be on the 'stack' */
+ break;
+ }
+ case OPR_ADD: case OPR_SUB:
+ case OPR_MUL: case OPR_DIV: case OPR_IDIV:
+ case OPR_MOD: case OPR_POW:
+ case OPR_BAND: case OPR_BOR: case OPR_BXOR:
+ case OPR_SHL: case OPR_SHR: {
+ if (!tonumeral(v, NULL))
+ luaK_exp2RK(fs, v);
+ /* else keep numeral, which may be folded with 2nd operand */
+ break;
+ }
+ default: {
+ luaK_exp2RK(fs, v);
+ break;
+ }
+ }
+}
+
+
+/*
+** Finalize code for binary operation, after reading 2nd operand.
+** For '(a .. b .. c)' (which is '(a .. (b .. c))', because
+** concatenation is right associative), merge second CONCAT into first
+** one.
+*/
+void luaK_posfix (FuncState *fs, BinOpr op,
+ expdesc *e1, expdesc *e2, int line) {
+ switch (op) {
+ case OPR_AND: {
+ lua_assert(e1->t == NO_JUMP); /* list closed by 'luaK_infix' */
+ luaK_dischargevars(fs, e2);
+ luaK_concat(fs, &e2->f, e1->f);
+ *e1 = *e2;
+ break;
+ }
+ case OPR_OR: {
+ lua_assert(e1->f == NO_JUMP); /* list closed by 'luaK_infix' */
+ luaK_dischargevars(fs, e2);
+ luaK_concat(fs, &e2->t, e1->t);
+ *e1 = *e2;
+ break;
+ }
+ case OPR_CONCAT: {
+ luaK_exp2val(fs, e2);
+ if (e2->k == VRELOCABLE &&
+ GET_OPCODE(getinstruction(fs, e2)) == OP_CONCAT) {
+ lua_assert(e1->u.info == GETARG_B(getinstruction(fs, e2))-1);
+ freeexp(fs, e1);
+ SETARG_B(getinstruction(fs, e2), e1->u.info);
+ e1->k = VRELOCABLE; e1->u.info = e2->u.info;
+ }
+ else {
+ luaK_exp2nextreg(fs, e2); /* operand must be on the 'stack' */
+ codebinexpval(fs, OP_CONCAT, e1, e2, line);
+ }
+ break;
+ }
+ case OPR_ADD: case OPR_SUB: case OPR_MUL: case OPR_DIV:
+ case OPR_IDIV: case OPR_MOD: case OPR_POW:
+ case OPR_BAND: case OPR_BOR: case OPR_BXOR:
+ case OPR_SHL: case OPR_SHR: {
+ if (!constfolding(fs, op + LUA_OPADD, e1, e2))
+ codebinexpval(fs, cast(OpCode, op + OP_ADD), e1, e2, line);
+ break;
+ }
+ case OPR_EQ: case OPR_LT: case OPR_LE:
+ case OPR_NE: case OPR_GT: case OPR_GE: {
+ codecomp(fs, op, e1, e2);
+ break;
+ }
+ default: lua_assert(0);
+ }
+}
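+
+
+/*
+** Concat-merging example: in 'local d = a .. b .. c' (locals in
+** registers 0..2) the inner 'b .. c' CONCAT is absorbed into the outer
+** one, producing roughly
+**
+**   MOVE   3 0
+**   MOVE   4 1
+**   MOVE   5 2
+**   CONCAT 3 3 5     ; a single instruction concatenates R3..R5
+*/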
+
+
+/*
+** Change line information associated with current position.
+*/
+void luaK_fixline (FuncState *fs, int line) {
+ fs->f->lineinfo[fs->pc - 1] = line;
+}
+
+
+/*
+** Emit a SETLIST instruction.
+** 'base' is register that keeps table;
+** 'nelems' is #table plus those to be stored now;
+** 'tostore' is number of values (in registers 'base + 1',...) to add to
+** table (or LUA_MULTRET to add up to stack top).
+*/
+void luaK_setlist (FuncState *fs, int base, int nelems, int tostore) {
+ int c = (nelems - 1)/LFIELDS_PER_FLUSH + 1;
+ int b = (tostore == LUA_MULTRET) ? 0 : tostore;
+ lua_assert(tostore != 0 && tostore <= LFIELDS_PER_FLUSH);
+ if (c <= MAXARG_C)
+ luaK_codeABC(fs, OP_SETLIST, base, b, c);
+ else if (c <= MAXARG_Ax) {
+ luaK_codeABC(fs, OP_SETLIST, base, b, 0);
+ codeextraarg(fs, c);
+ }
+ else
+ luaX_syntaxerror(fs->ls, "constructor too long");
+ fs->freereg = base + 1; /* free registers with list values */
+}
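+
+
+/*
+** Batching example: with the default LFIELDS_PER_FLUSH of 50, a
+** constructor with 120 array items emits three SETLIST instructions
+** whose C arguments are 1, 2 and 3 (the batch number), storing 50, 50
+** and 20 values; only a batch number larger than MAXARG_C needs the
+** EXTRAARG form.
+*/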
+
diff --git a/lua/src/lcode.h b/lua/src/lcode.h
new file mode 100644
index 000000000..cd306d573
--- /dev/null
+++ b/lua/src/lcode.h
@@ -0,0 +1,88 @@
+/*
+** $Id: lcode.h,v 1.64 2016/01/05 16:22:37 roberto Exp $
+** Code generator for Lua
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lcode_h
+#define lcode_h
+
+#include "llex.h"
+#include "lobject.h"
+#include "lopcodes.h"
+#include "lparser.h"
+
+
+/*
+** Marks the end of a patch list. It is an invalid value both as an absolute
+** address, and as a list link (would link an element to itself).
+*/
+#define NO_JUMP (-1)
+
+
+/*
+** grep "ORDER OPR" if you change these enums (ORDER OP)
+*/
+typedef enum BinOpr {
+ OPR_ADD, OPR_SUB, OPR_MUL, OPR_MOD, OPR_POW,
+ OPR_DIV,
+ OPR_IDIV,
+ OPR_BAND, OPR_BOR, OPR_BXOR,
+ OPR_SHL, OPR_SHR,
+ OPR_CONCAT,
+ OPR_EQ, OPR_LT, OPR_LE,
+ OPR_NE, OPR_GT, OPR_GE,
+ OPR_AND, OPR_OR,
+ OPR_NOBINOPR
+} BinOpr;
+
+
+typedef enum UnOpr { OPR_MINUS, OPR_BNOT, OPR_NOT, OPR_LEN, OPR_NOUNOPR } UnOpr;
+
+
+/* get (pointer to) instruction of given 'expdesc' */
+#define getinstruction(fs,e) ((fs)->f->code[(e)->u.info])
+
+#define luaK_codeAsBx(fs,o,A,sBx) luaK_codeABx(fs,o,A,(sBx)+MAXARG_sBx)
+
+#define luaK_setmultret(fs,e) luaK_setreturns(fs, e, LUA_MULTRET)
+
+#define luaK_jumpto(fs,t) luaK_patchlist(fs, luaK_jump(fs), t)
+
+LUAI_FUNC int luaK_codeABx (FuncState *fs, OpCode o, int A, unsigned int Bx);
+LUAI_FUNC int luaK_codeABC (FuncState *fs, OpCode o, int A, int B, int C);
+LUAI_FUNC int luaK_codek (FuncState *fs, int reg, int k);
+LUAI_FUNC void luaK_fixline (FuncState *fs, int line);
+LUAI_FUNC void luaK_nil (FuncState *fs, int from, int n);
+LUAI_FUNC void luaK_reserveregs (FuncState *fs, int n);
+LUAI_FUNC void luaK_checkstack (FuncState *fs, int n);
+LUAI_FUNC int luaK_stringK (FuncState *fs, TString *s);
+LUAI_FUNC int luaK_intK (FuncState *fs, lua_Integer n);
+LUAI_FUNC void luaK_dischargevars (FuncState *fs, expdesc *e);
+LUAI_FUNC int luaK_exp2anyreg (FuncState *fs, expdesc *e);
+LUAI_FUNC void luaK_exp2anyregup (FuncState *fs, expdesc *e);
+LUAI_FUNC void luaK_exp2nextreg (FuncState *fs, expdesc *e);
+LUAI_FUNC void luaK_exp2val (FuncState *fs, expdesc *e);
+LUAI_FUNC int luaK_exp2RK (FuncState *fs, expdesc *e);
+LUAI_FUNC void luaK_self (FuncState *fs, expdesc *e, expdesc *key);
+LUAI_FUNC void luaK_indexed (FuncState *fs, expdesc *t, expdesc *k);
+LUAI_FUNC void luaK_goiftrue (FuncState *fs, expdesc *e);
+LUAI_FUNC void luaK_goiffalse (FuncState *fs, expdesc *e);
+LUAI_FUNC void luaK_storevar (FuncState *fs, expdesc *var, expdesc *e);
+LUAI_FUNC void luaK_setreturns (FuncState *fs, expdesc *e, int nresults);
+LUAI_FUNC void luaK_setoneret (FuncState *fs, expdesc *e);
+LUAI_FUNC int luaK_jump (FuncState *fs);
+LUAI_FUNC void luaK_ret (FuncState *fs, int first, int nret);
+LUAI_FUNC void luaK_patchlist (FuncState *fs, int list, int target);
+LUAI_FUNC void luaK_patchtohere (FuncState *fs, int list);
+LUAI_FUNC void luaK_patchclose (FuncState *fs, int list, int level);
+LUAI_FUNC void luaK_concat (FuncState *fs, int *l1, int l2);
+LUAI_FUNC int luaK_getlabel (FuncState *fs);
+LUAI_FUNC void luaK_prefix (FuncState *fs, UnOpr op, expdesc *v, int line);
+LUAI_FUNC void luaK_infix (FuncState *fs, BinOpr op, expdesc *v);
+LUAI_FUNC void luaK_posfix (FuncState *fs, BinOpr op, expdesc *v1,
+ expdesc *v2, int line);
+LUAI_FUNC void luaK_setlist (FuncState *fs, int base, int nelems, int tostore);
+
+
+#endif
diff --git a/lua/src/lcorolib.c b/lua/src/lcorolib.c
new file mode 100644
index 000000000..2303429e7
--- /dev/null
+++ b/lua/src/lcorolib.c
@@ -0,0 +1,168 @@
+/*
+** $Id: lcorolib.c,v 1.10 2016/04/11 19:19:55 roberto Exp $
+** Coroutine Library
+** See Copyright Notice in lua.h
+*/
+
+#define lcorolib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <stdlib.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+static lua_State *getco (lua_State *L) {
+ lua_State *co = lua_tothread(L, 1);
+ luaL_argcheck(L, co, 1, "thread expected");
+ return co;
+}
+
+
+static int auxresume (lua_State *L, lua_State *co, int narg) {
+ int status;
+ if (!lua_checkstack(co, narg)) {
+ lua_pushliteral(L, "too many arguments to resume");
+ return -1; /* error flag */
+ }
+ if (lua_status(co) == LUA_OK && lua_gettop(co) == 0) {
+ lua_pushliteral(L, "cannot resume dead coroutine");
+ return -1; /* error flag */
+ }
+ lua_xmove(L, co, narg);
+ status = lua_resume(co, L, narg);
+ if (status == LUA_OK || status == LUA_YIELD) {
+ int nres = lua_gettop(co);
+ if (!lua_checkstack(L, nres + 1)) {
+ lua_pop(co, nres); /* remove results anyway */
+ lua_pushliteral(L, "too many results to resume");
+ return -1; /* error flag */
+ }
+ lua_xmove(co, L, nres); /* move yielded values */
+ return nres;
+ }
+ else {
+ lua_xmove(co, L, 1); /* move error message */
+ return -1; /* error flag */
+ }
+}
+
+
+static int luaB_coresume (lua_State *L) {
+ lua_State *co = getco(L);
+ int r;
+ r = auxresume(L, co, lua_gettop(L) - 1);
+ if (r < 0) {
+ lua_pushboolean(L, 0);
+ lua_insert(L, -2);
+ return 2; /* return false + error message */
+ }
+ else {
+ lua_pushboolean(L, 1);
+ lua_insert(L, -(r + 1));
+ return r + 1; /* return true + 'resume' returns */
+ }
+}
+
+
+static int luaB_auxwrap (lua_State *L) {
+ lua_State *co = lua_tothread(L, lua_upvalueindex(1));
+ int r = auxresume(L, co, lua_gettop(L));
+ if (r < 0) {
+ if (lua_type(L, -1) == LUA_TSTRING) { /* error object is a string? */
+ luaL_where(L, 1); /* add extra info */
+ lua_insert(L, -2);
+ lua_concat(L, 2);
+ }
+ return lua_error(L); /* propagate error */
+ }
+ return r;
+}
+
+
+static int luaB_cocreate (lua_State *L) {
+ lua_State *NL;
+ luaL_checktype(L, 1, LUA_TFUNCTION);
+ NL = lua_newthread(L);
+ lua_pushvalue(L, 1); /* move function to top */
+ lua_xmove(L, NL, 1); /* move function from L to NL */
+ return 1;
+}
+
+
+static int luaB_cowrap (lua_State *L) {
+ luaB_cocreate(L);
+ lua_pushcclosure(L, luaB_auxwrap, 1);
+ return 1;
+}
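+
+
+/*
+** Usage sketch of the two entry points above, as seen from Lua:
+**
+**   local co = coroutine.create(function (x) return x + 1 end)
+**   print(coroutine.resume(co, 1))  --> true  2
+**   print(coroutine.resume(co))     --> false  cannot resume dead coroutine
+**
+**   local f = coroutine.wrap(function (x) return x + 1 end)
+**   print(f(1))                     --> 2  (errors are re-raised, not returned)
+*/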
+
+
+static int luaB_yield (lua_State *L) {
+ return lua_yield(L, lua_gettop(L));
+}
+
+
+static int luaB_costatus (lua_State *L) {
+ lua_State *co = getco(L);
+ if (L == co) lua_pushliteral(L, "running");
+ else {
+ switch (lua_status(co)) {
+ case LUA_YIELD:
+ lua_pushliteral(L, "suspended");
+ break;
+ case LUA_OK: {
+ lua_Debug ar;
+ if (lua_getstack(co, 0, &ar) > 0) /* does it have frames? */
+ lua_pushliteral(L, "normal"); /* it is running */
+ else if (lua_gettop(co) == 0)
+ lua_pushliteral(L, "dead");
+ else
+ lua_pushliteral(L, "suspended"); /* initial state */
+ break;
+ }
+ default: /* some error occurred */
+ lua_pushliteral(L, "dead");
+ break;
+ }
+ }
+ return 1;
+}
+
+
+static int luaB_yieldable (lua_State *L) {
+ lua_pushboolean(L, lua_isyieldable(L));
+ return 1;
+}
+
+
+static int luaB_corunning (lua_State *L) {
+ int ismain = lua_pushthread(L);
+ lua_pushboolean(L, ismain);
+ return 2;
+}
+
+
+static const luaL_Reg co_funcs[] = {
+ {"create", luaB_cocreate},
+ {"resume", luaB_coresume},
+ {"running", luaB_corunning},
+ {"status", luaB_costatus},
+ {"wrap", luaB_cowrap},
+ {"yield", luaB_yield},
+ {"isyieldable", luaB_yieldable},
+ {NULL, NULL}
+};
+
+
+
+LUAMOD_API int luaopen_coroutine (lua_State *L) {
+ luaL_newlib(L, co_funcs);
+ return 1;
+}
+
diff --git a/lua/src/lctype.c b/lua/src/lctype.c
new file mode 100644
index 000000000..ae9367e69
--- /dev/null
+++ b/lua/src/lctype.c
@@ -0,0 +1,55 @@
+/*
+** $Id: lctype.c,v 1.12 2014/11/02 19:19:04 roberto Exp $
+** 'ctype' functions for Lua
+** See Copyright Notice in lua.h
+*/
+
+#define lctype_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include "lctype.h"
+
+#if !LUA_USE_CTYPE /* { */
+
+#include <limits.h>
+
+LUAI_DDEF const lu_byte luai_ctype_[UCHAR_MAX + 2] = {
+ 0x00, /* EOZ */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* 0. */
+ 0x00, 0x08, 0x08, 0x08, 0x08, 0x08, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* 1. */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x0c, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, /* 2. */
+ 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
+ 0x16, 0x16, 0x16, 0x16, 0x16, 0x16, 0x16, 0x16, /* 3. */
+ 0x16, 0x16, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
+ 0x04, 0x15, 0x15, 0x15, 0x15, 0x15, 0x15, 0x05, /* 4. */
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, /* 5. */
+ 0x05, 0x05, 0x05, 0x04, 0x04, 0x04, 0x04, 0x05,
+ 0x04, 0x15, 0x15, 0x15, 0x15, 0x15, 0x15, 0x05, /* 6. */
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, /* 7. */
+ 0x05, 0x05, 0x05, 0x04, 0x04, 0x04, 0x04, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* 8. */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* 9. */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* a. */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* b. */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* c. */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* d. */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* e. */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* f. */
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+};
+
+#endif /* } */
diff --git a/lua/src/lctype.h b/lua/src/lctype.h
new file mode 100644
index 000000000..99c7d1223
--- /dev/null
+++ b/lua/src/lctype.h
@@ -0,0 +1,95 @@
+/*
+** $Id: lctype.h,v 1.12 2011/07/15 12:50:29 roberto Exp $
+** 'ctype' functions for Lua
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lctype_h
+#define lctype_h
+
+#include "lua.h"
+
+
+/*
+** WARNING: the functions defined here do not necessarily correspond
+** to the similar functions in the standard C ctype.h. They are
+** optimized for the specific needs of Lua
+*/
+
+#if !defined(LUA_USE_CTYPE)
+
+#if 'A' == 65 && '0' == 48
+/* ASCII case: can use its own tables; faster and fixed */
+#define LUA_USE_CTYPE 0
+#else
+/* must use standard C ctype */
+#define LUA_USE_CTYPE 1
+#endif
+
+#endif
+
+
+#if !LUA_USE_CTYPE /* { */
+
+#include <limits.h>
+
+#include "llimits.h"
+
+
+#define ALPHABIT 0
+#define DIGITBIT 1
+#define PRINTBIT 2
+#define SPACEBIT 3
+#define XDIGITBIT 4
+
+
+#define MASK(B) (1 << (B))
+
+
+/*
+** add 1 to char to allow index -1 (EOZ)
+*/
+#define testprop(c,p) (luai_ctype_[(c)+1] & (p))
+
+/*
+** 'lalpha' (Lua alphabetic) and 'lalnum' (Lua alphanumeric) both include '_'
+*/
+#define lislalpha(c) testprop(c, MASK(ALPHABIT))
+#define lislalnum(c) testprop(c, (MASK(ALPHABIT) | MASK(DIGITBIT)))
+#define lisdigit(c) testprop(c, MASK(DIGITBIT))
+#define lisspace(c) testprop(c, MASK(SPACEBIT))
+#define lisprint(c) testprop(c, MASK(PRINTBIT))
+#define lisxdigit(c) testprop(c, MASK(XDIGITBIT))
+
+/*
+** this 'ltolower' only works for alphabetic characters
+*/
+#define ltolower(c) ((c) | ('A' ^ 'a'))
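+
+
+/*
+** ('A' ^ 'a') is 0x20, so this simply sets the ASCII lower-case bit:
+** ltolower('A') == 'a' and ltolower('a') == 'a'. As noted above, the
+** result is meaningless for non-alphabetic characters.
+*/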
+
+
+/* two more entries for 0 and -1 (EOZ) */
+LUAI_DDEC const lu_byte luai_ctype_[UCHAR_MAX + 2];
+
+
+#else /* }{ */
+
+/*
+** use standard C ctypes
+*/
+
+#include <ctype.h>
+
+
+#define lislalpha(c) (isalpha(c) || (c) == '_')
+#define lislalnum(c) (isalnum(c) || (c) == '_')
+#define lisdigit(c) (isdigit(c))
+#define lisspace(c) (isspace(c))
+#define lisprint(c) (isprint(c))
+#define lisxdigit(c) (isxdigit(c))
+
+#define ltolower(c) (tolower(c))
+
+#endif /* } */
+
+#endif
+
diff --git a/lua/src/ldblib.c b/lua/src/ldblib.c
new file mode 100644
index 000000000..786f6cd95
--- /dev/null
+++ b/lua/src/ldblib.c
@@ -0,0 +1,456 @@
+/*
+** $Id: ldblib.c,v 1.151 2015/11/23 11:29:43 roberto Exp $
+** Interface from Lua to its debug API
+** See Copyright Notice in lua.h
+*/
+
+#define ldblib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+/*
+** The hook table at registry[&HOOKKEY] maps threads to their current
+** hook function. (We only need the unique address of 'HOOKKEY'.)
+*/
+static const int HOOKKEY = 0;
+
+
+/*
+** If L1 != L, L1 can be in any state, and therefore there are no
+** guarantees about its stack space; any push in L1 must be
+** checked.
+*/
+static void checkstack (lua_State *L, lua_State *L1, int n) {
+ if (L != L1 && !lua_checkstack(L1, n))
+ luaL_error(L, "stack overflow");
+}
+
+
+static int db_getregistry (lua_State *L) {
+ lua_pushvalue(L, LUA_REGISTRYINDEX);
+ return 1;
+}
+
+
+static int db_getmetatable (lua_State *L) {
+ luaL_checkany(L, 1);
+ if (!lua_getmetatable(L, 1)) {
+ lua_pushnil(L); /* no metatable */
+ }
+ return 1;
+}
+
+
+static int db_setmetatable (lua_State *L) {
+ int t = lua_type(L, 2);
+ luaL_argcheck(L, t == LUA_TNIL || t == LUA_TTABLE, 2,
+ "nil or table expected");
+ lua_settop(L, 2);
+ lua_setmetatable(L, 1);
+ return 1; /* return 1st argument */
+}
+
+
+static int db_getuservalue (lua_State *L) {
+ if (lua_type(L, 1) != LUA_TUSERDATA)
+ lua_pushnil(L);
+ else
+ lua_getuservalue(L, 1);
+ return 1;
+}
+
+
+static int db_setuservalue (lua_State *L) {
+ luaL_checktype(L, 1, LUA_TUSERDATA);
+ luaL_checkany(L, 2);
+ lua_settop(L, 2);
+ lua_setuservalue(L, 1);
+ return 1;
+}
+
+
+/*
+** Auxiliary function used by several library functions: check for
+** an optional thread as the function's first argument and set 'arg' to
+** 1 if this argument is present (so that functions can skip it to
+** access their other arguments).
+*/
+static lua_State *getthread (lua_State *L, int *arg) {
+ if (lua_isthread(L, 1)) {
+ *arg = 1;
+ return lua_tothread(L, 1);
+ }
+ else {
+ *arg = 0;
+ return L; /* function will operate over current thread */
+ }
+}
+
+
+/*
+** Variations of 'lua_settable', used by 'db_getinfo' to put results
+** from 'lua_getinfo' into result table. Key is always a string;
+** value can be a string, an int, or a boolean.
+*/
+static void settabss (lua_State *L, const char *k, const char *v) {
+ lua_pushstring(L, v);
+ lua_setfield(L, -2, k);
+}
+
+static void settabsi (lua_State *L, const char *k, int v) {
+ lua_pushinteger(L, v);
+ lua_setfield(L, -2, k);
+}
+
+static void settabsb (lua_State *L, const char *k, int v) {
+ lua_pushboolean(L, v);
+ lua_setfield(L, -2, k);
+}
+
+
+/*
+** In function 'db_getinfo', the call to 'lua_getinfo' may push
+** results on the stack; later it creates the result table to put
+** these objects. Function 'treatstackoption' puts the result from
+** 'lua_getinfo' on top of the result table so that it can call
+** 'lua_setfield'.
+*/
+static void treatstackoption (lua_State *L, lua_State *L1, const char *fname) {
+ if (L == L1)
+ lua_rotate(L, -2, 1); /* exchange object and table */
+ else
+ lua_xmove(L1, L, 1); /* move object to the "main" stack */
+ lua_setfield(L, -2, fname); /* put object into table */
+}
+
+
+/*
+** Calls 'lua_getinfo' and collects all results in a new table.
+** L1 needs stack space for an optional input (function) plus
+** two optional outputs (function and line table) from function
+** 'lua_getinfo'.
+*/
+static int db_getinfo (lua_State *L) {
+ lua_Debug ar;
+ int arg;
+ lua_State *L1 = getthread(L, &arg);
+ const char *options = luaL_optstring(L, arg+2, "flnStu");
+ checkstack(L, L1, 3);
+ if (lua_isfunction(L, arg + 1)) { /* info about a function? */
+ options = lua_pushfstring(L, ">%s", options); /* add '>' to 'options' */
+ lua_pushvalue(L, arg + 1); /* move function to 'L1' stack */
+ lua_xmove(L, L1, 1);
+ }
+ else { /* stack level */
+ if (!lua_getstack(L1, (int)luaL_checkinteger(L, arg + 1), &ar)) {
+ lua_pushnil(L); /* level out of range */
+ return 1;
+ }
+ }
+ if (!lua_getinfo(L1, options, &ar))
+ return luaL_argerror(L, arg+2, "invalid option");
+ lua_newtable(L); /* table to collect results */
+ if (strchr(options, 'S')) {
+ settabss(L, "source", ar.source);
+ settabss(L, "short_src", ar.short_src);
+ settabsi(L, "linedefined", ar.linedefined);
+ settabsi(L, "lastlinedefined", ar.lastlinedefined);
+ settabss(L, "what", ar.what);
+ }
+ if (strchr(options, 'l'))
+ settabsi(L, "currentline", ar.currentline);
+ if (strchr(options, 'u')) {
+ settabsi(L, "nups", ar.nups);
+ settabsi(L, "nparams", ar.nparams);
+ settabsb(L, "isvararg", ar.isvararg);
+ }
+ if (strchr(options, 'n')) {
+ settabss(L, "name", ar.name);
+ settabss(L, "namewhat", ar.namewhat);
+ }
+ if (strchr(options, 't'))
+ settabsb(L, "istailcall", ar.istailcall);
+ if (strchr(options, 'L'))
+ treatstackoption(L, L1, "activelines");
+ if (strchr(options, 'f'))
+ treatstackoption(L, L1, "func");
+ return 1; /* return table */
+}
+
+
+static int db_getlocal (lua_State *L) {
+ int arg;
+ lua_State *L1 = getthread(L, &arg);
+ lua_Debug ar;
+ const char *name;
+ int nvar = (int)luaL_checkinteger(L, arg + 2); /* local-variable index */
+ if (lua_isfunction(L, arg + 1)) { /* function argument? */
+ lua_pushvalue(L, arg + 1); /* push function */
+ lua_pushstring(L, lua_getlocal(L, NULL, nvar)); /* push local name */
+ return 1; /* return only name (there is no value) */
+ }
+ else { /* stack-level argument */
+ int level = (int)luaL_checkinteger(L, arg + 1);
+ if (!lua_getstack(L1, level, &ar)) /* out of range? */
+ return luaL_argerror(L, arg+1, "level out of range");
+ checkstack(L, L1, 1);
+ name = lua_getlocal(L1, &ar, nvar);
+ if (name) {
+ lua_xmove(L1, L, 1); /* move local value */
+ lua_pushstring(L, name); /* push name */
+ lua_rotate(L, -2, 1); /* re-order */
+ return 2;
+ }
+ else {
+ lua_pushnil(L); /* no name (nor value) */
+ return 1;
+ }
+ }
+}
+
+
+static int db_setlocal (lua_State *L) {
+ int arg;
+ const char *name;
+ lua_State *L1 = getthread(L, &arg);
+ lua_Debug ar;
+ int level = (int)luaL_checkinteger(L, arg + 1);
+ int nvar = (int)luaL_checkinteger(L, arg + 2);
+ if (!lua_getstack(L1, level, &ar)) /* out of range? */
+ return luaL_argerror(L, arg+1, "level out of range");
+ luaL_checkany(L, arg+3);
+ lua_settop(L, arg+3);
+ checkstack(L, L1, 1);
+ lua_xmove(L, L1, 1);
+ name = lua_setlocal(L1, &ar, nvar);
+ if (name == NULL)
+ lua_pop(L1, 1); /* pop value (if not popped by 'lua_setlocal') */
+ lua_pushstring(L, name);
+ return 1;
+}
+
+
+/*
+** get (if 'get' is true) or set an upvalue from a closure
+*/
+static int auxupvalue (lua_State *L, int get) {
+ const char *name;
+ int n = (int)luaL_checkinteger(L, 2); /* upvalue index */
+ luaL_checktype(L, 1, LUA_TFUNCTION); /* closure */
+ name = get ? lua_getupvalue(L, 1, n) : lua_setupvalue(L, 1, n);
+ if (name == NULL) return 0;
+ lua_pushstring(L, name);
+ lua_insert(L, -(get+1)); /* no-op if get is false */
+ return get + 1;
+}
+
+
+static int db_getupvalue (lua_State *L) {
+ return auxupvalue(L, 1);
+}
+
+
+static int db_setupvalue (lua_State *L) {
+ luaL_checkany(L, 3);
+ return auxupvalue(L, 0);
+}
+
+
+/*
+** Check whether a given upvalue from a given closure exists and
+** return its index.
+*/
+static int checkupval (lua_State *L, int argf, int argnup) {
+ int nup = (int)luaL_checkinteger(L, argnup); /* upvalue index */
+ luaL_checktype(L, argf, LUA_TFUNCTION); /* closure */
+ luaL_argcheck(L, (lua_getupvalue(L, argf, nup) != NULL), argnup,
+ "invalid upvalue index");
+ return nup;
+}
+
+
+static int db_upvalueid (lua_State *L) {
+ int n = checkupval(L, 1, 2);
+ lua_pushlightuserdata(L, lua_upvalueid(L, 1, n));
+ return 1;
+}
+
+
+static int db_upvaluejoin (lua_State *L) {
+ int n1 = checkupval(L, 1, 2);
+ int n2 = checkupval(L, 3, 4);
+ luaL_argcheck(L, !lua_iscfunction(L, 1), 1, "Lua function expected");
+ luaL_argcheck(L, !lua_iscfunction(L, 3), 3, "Lua function expected");
+ lua_upvaluejoin(L, 1, n1, 3, n2);
+ return 0;
+}
+
+
+/*
+** Call hook function registered at hook table for the current
+** thread (if there is one)
+*/
+static void hookf (lua_State *L, lua_Debug *ar) {
+ static const char *const hooknames[] =
+ {"call", "return", "line", "count", "tail call"};
+ lua_rawgetp(L, LUA_REGISTRYINDEX, &HOOKKEY);
+ lua_pushthread(L);
+ if (lua_rawget(L, -2) == LUA_TFUNCTION) { /* is there a hook function? */
+ lua_pushstring(L, hooknames[(int)ar->event]); /* push event name */
+ if (ar->currentline >= 0)
+ lua_pushinteger(L, ar->currentline); /* push current line */
+ else lua_pushnil(L);
+ lua_assert(lua_getinfo(L, "lS", ar));
+ lua_call(L, 2, 0); /* call hook function */
+ }
+}
+
+
+/*
+** Convert a string mask (for 'sethook') into a bit mask
+*/
+static int makemask (const char *smask, int count) {
+ int mask = 0;
+ if (strchr(smask, 'c')) mask |= LUA_MASKCALL;
+ if (strchr(smask, 'r')) mask |= LUA_MASKRET;
+ if (strchr(smask, 'l')) mask |= LUA_MASKLINE;
+ if (count > 0) mask |= LUA_MASKCOUNT;
+ return mask;
+}
+
+
+/*
+** Convert a bit mask (for 'gethook') into a string mask
+*/
+static char *unmakemask (int mask, char *smask) {
+ int i = 0;
+ if (mask & LUA_MASKCALL) smask[i++] = 'c';
+ if (mask & LUA_MASKRET) smask[i++] = 'r';
+ if (mask & LUA_MASKLINE) smask[i++] = 'l';
+ smask[i] = '\0';
+ return smask;
+}
+
+
+static int db_sethook (lua_State *L) {
+ int arg, mask, count;
+ lua_Hook func;
+ lua_State *L1 = getthread(L, &arg);
+ if (lua_isnoneornil(L, arg+1)) { /* no hook? */
+ lua_settop(L, arg+1);
+ func = NULL; mask = 0; count = 0; /* turn off hooks */
+ }
+ else {
+ const char *smask = luaL_checkstring(L, arg+2);
+ luaL_checktype(L, arg+1, LUA_TFUNCTION);
+ count = (int)luaL_optinteger(L, arg + 3, 0);
+ func = hookf; mask = makemask(smask, count);
+ }
+ if (lua_rawgetp(L, LUA_REGISTRYINDEX, &HOOKKEY) == LUA_TNIL) {
+ lua_createtable(L, 0, 2); /* create a hook table */
+ lua_pushvalue(L, -1);
+ lua_rawsetp(L, LUA_REGISTRYINDEX, &HOOKKEY); /* set it in position */
+ lua_pushstring(L, "k");
+ lua_setfield(L, -2, "__mode"); /** hooktable.__mode = "k" */
+ lua_pushvalue(L, -1);
+ lua_setmetatable(L, -2); /* setmetatable(hooktable) = hooktable */
+ }
+ checkstack(L, L1, 1);
+ lua_pushthread(L1); lua_xmove(L1, L, 1); /* key (thread) */
+ lua_pushvalue(L, arg + 1); /* value (hook function) */
+ lua_rawset(L, -3); /* hooktable[L1] = new Lua hook */
+ lua_sethook(L1, func, mask, count);
+ return 0;
+}
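+
+
+/*
+** Usage sketch from Lua (the mask string maps to the bits built by
+** 'makemask' above):
+**
+**   debug.sethook(function (event, line) print(event, line) end, "l")
+**   debug.sethook()    -- no arguments: remove the hook
+**
+** "c" traces calls, "r" returns, "l" lines; a count > 0 adds
+** LUA_MASKCOUNT.
+*/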
+
+
+static int db_gethook (lua_State *L) {
+ int arg;
+ lua_State *L1 = getthread(L, &arg);
+ char buff[5];
+ int mask = lua_gethookmask(L1);
+ lua_Hook hook = lua_gethook(L1);
+ if (hook == NULL) /* no hook? */
+ lua_pushnil(L);
+ else if (hook != hookf) /* external hook? */
+ lua_pushliteral(L, "external hook");
+ else { /* hook table must exist */
+ lua_rawgetp(L, LUA_REGISTRYINDEX, &HOOKKEY);
+ checkstack(L, L1, 1);
+ lua_pushthread(L1); lua_xmove(L1, L, 1);
+ lua_rawget(L, -2); /* 1st result = hooktable[L1] */
+ lua_remove(L, -2); /* remove hook table */
+ }
+ lua_pushstring(L, unmakemask(mask, buff)); /* 2nd result = mask */
+ lua_pushinteger(L, lua_gethookcount(L1)); /* 3rd result = count */
+ return 3;
+}
+
+
+static int db_debug (lua_State *L) {
+ for (;;) {
+ char buffer[250];
+ lua_writestringerror("%s", "lua_debug> ");
+ if (fgets(buffer, sizeof(buffer), stdin) == 0 ||
+ strcmp(buffer, "cont\n") == 0)
+ return 0;
+ if (luaL_loadbuffer(L, buffer, strlen(buffer), "=(debug command)") ||
+ lua_pcall(L, 0, 0, 0))
+ lua_writestringerror("%s\n", lua_tostring(L, -1));
+ lua_settop(L, 0); /* remove eventual returns */
+ }
+}
+
+
+static int db_traceback (lua_State *L) {
+ int arg;
+ lua_State *L1 = getthread(L, &arg);
+ const char *msg = lua_tostring(L, arg + 1);
+ if (msg == NULL && !lua_isnoneornil(L, arg + 1)) /* non-string 'msg'? */
+ lua_pushvalue(L, arg + 1); /* return it untouched */
+ else {
+ int level = (int)luaL_optinteger(L, arg + 2, (L == L1) ? 1 : 0);
+ luaL_traceback(L, L1, msg, level);
+ }
+ return 1;
+}
+
+
+static const luaL_Reg dblib[] = {
+ {"debug", db_debug},
+ {"getuservalue", db_getuservalue},
+ {"gethook", db_gethook},
+ {"getinfo", db_getinfo},
+ {"getlocal", db_getlocal},
+ {"getregistry", db_getregistry},
+ {"getmetatable", db_getmetatable},
+ {"getupvalue", db_getupvalue},
+ {"upvaluejoin", db_upvaluejoin},
+ {"upvalueid", db_upvalueid},
+ {"setuservalue", db_setuservalue},
+ {"sethook", db_sethook},
+ {"setlocal", db_setlocal},
+ {"setmetatable", db_setmetatable},
+ {"setupvalue", db_setupvalue},
+ {"traceback", db_traceback},
+ {NULL, NULL}
+};
+
+
+LUAMOD_API int luaopen_debug (lua_State *L) {
+ luaL_newlib(L, dblib);
+ return 1;
+}
+
diff --git a/lua/src/ldebug.c b/lua/src/ldebug.c
new file mode 100644
index 000000000..239affb76
--- /dev/null
+++ b/lua/src/ldebug.c
@@ -0,0 +1,698 @@
+/*
+** $Id: ldebug.c,v 2.121 2016/10/19 12:32:10 roberto Exp $
+** Debug Interface
+** See Copyright Notice in lua.h
+*/
+
+#define ldebug_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <stdarg.h>
+#include <stddef.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lapi.h"
+#include "lcode.h"
+#include "ldebug.h"
+#include "ldo.h"
+#include "lfunc.h"
+#include "lobject.h"
+#include "lopcodes.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "ltm.h"
+#include "lvm.h"
+
+
+
+#define noLuaClosure(f) ((f) == NULL || (f)->c.tt == LUA_TCCL)
+
+
+/* Active Lua function (given call info) */
+#define ci_func(ci) (clLvalue((ci)->func))
+
+
+static const char *funcnamefromcode (lua_State *L, CallInfo *ci,
+ const char **name);
+
+
+static int currentpc (CallInfo *ci) {
+ lua_assert(isLua(ci));
+ return pcRel(ci->u.l.savedpc, ci_func(ci)->p);
+}
+
+
+static int currentline (CallInfo *ci) {
+ return getfuncline(ci_func(ci)->p, currentpc(ci));
+}
+
+
+/*
+** If function yielded, its 'func' can be in the 'extra' field. The
+** next function restores 'func' to its correct value for debugging
+** purposes. (It exchanges 'func' and 'extra'; so, when called again,
+** after debugging, it also "re-restores" 'func' to its altered value.)
+*/
+static void swapextra (lua_State *L) {
+ if (L->status == LUA_YIELD) {
+ CallInfo *ci = L->ci; /* get function that yielded */
+ StkId temp = ci->func; /* exchange its 'func' and 'extra' values */
+ ci->func = restorestack(L, ci->extra);
+ ci->extra = savestack(L, temp);
+ }
+}
+
+
+/*
+** This function can be called asynchronously (e.g. during a signal).
+** Fields 'oldpc', 'basehookcount', and 'hookcount' (set by
+** 'resethookcount') are for debug only, and it is no problem if they
+** get arbitrary values (causes at most one wrong hook call). 'hookmask'
+** is an atomic value. We assume that pointers are atomic too (e.g., gcc
+** ensures that for all platforms where it runs). Moreover, 'hook' is
+** always checked before being called (see 'luaD_hook').
+*/
+LUA_API void lua_sethook (lua_State *L, lua_Hook func, int mask, int count) {
+ if (func == NULL || mask == 0) { /* turn off hooks? */
+ mask = 0;
+ func = NULL;
+ }
+ if (isLua(L->ci))
+ L->oldpc = L->ci->u.l.savedpc;
+ L->hook = func;
+ L->basehookcount = count;
+ resethookcount(L);
+ L->hookmask = cast_byte(mask);
+}
+
+
+LUA_API lua_Hook lua_gethook (lua_State *L) {
+ return L->hook;
+}
+
+
+LUA_API int lua_gethookmask (lua_State *L) {
+ return L->hookmask;
+}
+
+
+LUA_API int lua_gethookcount (lua_State *L) {
+ return L->basehookcount;
+}
+
+
+LUA_API int lua_getstack (lua_State *L, int level, lua_Debug *ar) {
+ int status;
+ CallInfo *ci;
+ if (level < 0) return 0; /* invalid (negative) level */
+ lua_lock(L);
+ for (ci = L->ci; level > 0 && ci != &L->base_ci; ci = ci->previous)
+ level--;
+ if (level == 0 && ci != &L->base_ci) { /* level found? */
+ status = 1;
+ ar->i_ci = ci;
+ }
+ else status = 0; /* no such level */
+ lua_unlock(L);
+ return status;
+}
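+
+
+/*
+** Typical caller-side loop over this function (a sketch; compare
+** 'luaL_traceback' in lauxlib.c):
+**
+**   lua_Debug ar;
+**   int level = 0;
+**   while (lua_getstack(L, level++, &ar)) {
+**     lua_getinfo(L, "Sln", &ar);
+**     ... use ar.short_src, ar.currentline, ar.name ...
+**   }
+*/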
+
+
+static const char *upvalname (Proto *p, int uv) {
+ TString *s = check_exp(uv < p->sizeupvalues, p->upvalues[uv].name);
+ if (s == NULL) return "?";
+ else return getstr(s);
+}
+
+
+static const char *findvararg (CallInfo *ci, int n, StkId *pos) {
+ int nparams = clLvalue(ci->func)->p->numparams;
+ if (n >= cast_int(ci->u.l.base - ci->func) - nparams)
+ return NULL; /* no such vararg */
+ else {
+ *pos = ci->func + nparams + n;
+ return "(*vararg)"; /* generic name for any vararg */
+ }
+}
+
+
+static const char *findlocal (lua_State *L, CallInfo *ci, int n,
+ StkId *pos) {
+ const char *name = NULL;
+ StkId base;
+ if (isLua(ci)) {
+ if (n < 0) /* access to vararg values? */
+ return findvararg(ci, -n, pos);
+ else {
+ base = ci->u.l.base;
+ name = luaF_getlocalname(ci_func(ci)->p, n, currentpc(ci));
+ }
+ }
+ else
+ base = ci->func + 1;
+ if (name == NULL) { /* no 'standard' name? */
+ StkId limit = (ci == L->ci) ? L->top : ci->next->func;
+ if (limit - base >= n && n > 0) /* is 'n' inside 'ci' stack? */
+ name = "(*temporary)"; /* generic name for any valid slot */
+ else
+ return NULL; /* no name */
+ }
+ *pos = base + (n - 1);
+ return name;
+}
+
+
+LUA_API const char *lua_getlocal (lua_State *L, const lua_Debug *ar, int n) {
+ const char *name;
+ lua_lock(L);
+ swapextra(L);
+ if (ar == NULL) { /* information about non-active function? */
+ if (!isLfunction(L->top - 1)) /* not a Lua function? */
+ name = NULL;
+ else /* consider live variables at function start (parameters) */
+ name = luaF_getlocalname(clLvalue(L->top - 1)->p, n, 0);
+ }
+ else { /* active function; get information through 'ar' */
+ StkId pos = NULL; /* to avoid warnings */
+ name = findlocal(L, ar->i_ci, n, &pos);
+ if (name) {
+ setobj2s(L, L->top, pos);
+ api_incr_top(L);
+ }
+ }
+ swapextra(L);
+ lua_unlock(L);
+ return name;
+}
+
+
+LUA_API const char *lua_setlocal (lua_State *L, const lua_Debug *ar, int n) {
+ StkId pos = NULL; /* to avoid warnings */
+ const char *name;
+ lua_lock(L);
+ swapextra(L);
+ name = findlocal(L, ar->i_ci, n, &pos);
+ if (name) {
+ setobjs2s(L, pos, L->top - 1);
+ L->top--; /* pop value */
+ }
+ swapextra(L);
+ lua_unlock(L);
+ return name;
+}
+
+
+static void funcinfo (lua_Debug *ar, Closure *cl) {
+ if (noLuaClosure(cl)) {
+ ar->source = "=[C]";
+ ar->linedefined = -1;
+ ar->lastlinedefined = -1;
+ ar->what = "C";
+ }
+ else {
+ Proto *p = cl->l.p;
+ ar->source = p->source ? getstr(p->source) : "=?";
+ ar->linedefined = p->linedefined;
+ ar->lastlinedefined = p->lastlinedefined;
+ ar->what = (ar->linedefined == 0) ? "main" : "Lua";
+ }
+ luaO_chunkid(ar->short_src, ar->source, LUA_IDSIZE);
+}
+
+
+static void collectvalidlines (lua_State *L, Closure *f) {
+ if (noLuaClosure(f)) {
+ setnilvalue(L->top);
+ api_incr_top(L);
+ }
+ else {
+ int i;
+ TValue v;
+ int *lineinfo = f->l.p->lineinfo;
+ Table *t = luaH_new(L); /* new table to store active lines */
+ sethvalue(L, L->top, t); /* push it on stack */
+ api_incr_top(L);
+ setbvalue(&v, 1); /* boolean 'true' to be the value of all indices */
+ for (i = 0; i < f->l.p->sizelineinfo; i++) /* for all lines with code */
+ luaH_setint(L, t, lineinfo[i], &v); /* table[line] = true */
+ }
+}
+
+
+static const char *getfuncname (lua_State *L, CallInfo *ci, const char **name) {
+ if (ci == NULL) /* no 'ci'? */
+ return NULL; /* no info */
+ else if (ci->callstatus & CIST_FIN) { /* is this a finalizer? */
+ *name = "__gc";
+ return "metamethod"; /* report it as such */
+ }
+ /* calling function is a known Lua function? */
+ else if (!(ci->callstatus & CIST_TAIL) && isLua(ci->previous))
+ return funcnamefromcode(L, ci->previous, name);
+ else return NULL; /* no way to find a name */
+}
+
+
+static int auxgetinfo (lua_State *L, const char *what, lua_Debug *ar,
+ Closure *f, CallInfo *ci) {
+ int status = 1;
+ for (; *what; what++) {
+ switch (*what) {
+ case 'S': {
+ funcinfo(ar, f);
+ break;
+ }
+ case 'l': {
+ ar->currentline = (ci && isLua(ci)) ? currentline(ci) : -1;
+ break;
+ }
+ case 'u': {
+ ar->nups = (f == NULL) ? 0 : f->c.nupvalues;
+ if (noLuaClosure(f)) {
+ ar->isvararg = 1;
+ ar->nparams = 0;
+ }
+ else {
+ ar->isvararg = f->l.p->is_vararg;
+ ar->nparams = f->l.p->numparams;
+ }
+ break;
+ }
+ case 't': {
+ ar->istailcall = (ci) ? ci->callstatus & CIST_TAIL : 0;
+ break;
+ }
+ case 'n': {
+ ar->namewhat = getfuncname(L, ci, &ar->name);
+ if (ar->namewhat == NULL) {
+ ar->namewhat = ""; /* not found */
+ ar->name = NULL;
+ }
+ break;
+ }
+ case 'L':
+ case 'f': /* handled by lua_getinfo */
+ break;
+ default: status = 0; /* invalid option */
+ }
+ }
+ return status;
+}
+
+
+LUA_API int lua_getinfo (lua_State *L, const char *what, lua_Debug *ar) {
+ int status;
+ Closure *cl;
+ CallInfo *ci;
+ StkId func;
+ lua_lock(L);
+ swapextra(L);
+ if (*what == '>') {
+ ci = NULL;
+ func = L->top - 1;
+ api_check(L, ttisfunction(func), "function expected");
+ what++; /* skip the '>' */
+ L->top--; /* pop function */
+ }
+ else {
+ ci = ar->i_ci;
+ func = ci->func;
+ lua_assert(ttisfunction(ci->func));
+ }
+ cl = ttisclosure(func) ? clvalue(func) : NULL;
+ status = auxgetinfo(L, what, ar, cl, ci);
+ if (strchr(what, 'f')) {
+ setobjs2s(L, L->top, func);
+ api_incr_top(L);
+ }
+ swapextra(L); /* correct before option 'L', which can raise a mem. error */
+ if (strchr(what, 'L'))
+ collectvalidlines(L, cl);
+ lua_unlock(L);
+ return status;
+}
+
+
+/*
+** {======================================================
+** Symbolic Execution
+** =======================================================
+*/
+
+static const char *getobjname (Proto *p, int lastpc, int reg,
+ const char **name);
+
+
+/*
+** find a "name" for the RK value 'c'
+*/
+static void kname (Proto *p, int pc, int c, const char **name) {
+ if (ISK(c)) { /* is 'c' a constant? */
+ TValue *kvalue = &p->k[INDEXK(c)];
+ if (ttisstring(kvalue)) { /* literal constant? */
+ *name = svalue(kvalue); /* it is its own name */
+ return;
+ }
+ /* else no reasonable name found */
+ }
+ else { /* 'c' is a register */
+ const char *what = getobjname(p, pc, c, name); /* search for 'c' */
+ if (what && *what == 'c') { /* found a constant name? */
+ return; /* 'name' already filled */
+ }
+ /* else no reasonable name found */
+ }
+ *name = "?"; /* no reasonable name found */
+}
+
+
+static int filterpc (int pc, int jmptarget) {
+ if (pc < jmptarget) /* is code conditional (inside a jump)? */
+ return -1; /* cannot know who sets that register */
+ else return pc; /* current position sets that register */
+}
+
+
+/*
+** try to find last instruction before 'lastpc' that modified register 'reg'
+*/
+static int findsetreg (Proto *p, int lastpc, int reg) {
+ int pc;
+ int setreg = -1; /* keep last instruction that changed 'reg' */
+ int jmptarget = 0; /* any code before this address is conditional */
+ for (pc = 0; pc < lastpc; pc++) {
+ Instruction i = p->code[pc];
+ OpCode op = GET_OPCODE(i);
+ int a = GETARG_A(i);
+ switch (op) {
+ case OP_LOADNIL: {
+ int b = GETARG_B(i);
+ if (a <= reg && reg <= a + b) /* set registers from 'a' to 'a+b' */
+ setreg = filterpc(pc, jmptarget);
+ break;
+ }
+ case OP_TFORCALL: {
+ if (reg >= a + 2) /* affect all regs above its base */
+ setreg = filterpc(pc, jmptarget);
+ break;
+ }
+ case OP_CALL:
+ case OP_TAILCALL: {
+ if (reg >= a) /* affect all registers above base */
+ setreg = filterpc(pc, jmptarget);
+ break;
+ }
+ case OP_JMP: {
+ int b = GETARG_sBx(i);
+ int dest = pc + 1 + b;
+ /* jump is forward and does not skip 'lastpc'? */
+ if (pc < dest && dest <= lastpc) {
+ if (dest > jmptarget)
+ jmptarget = dest; /* update 'jmptarget' */
+ }
+ break;
+ }
+ default:
+ if (testAMode(op) && reg == a) /* any instruction that set A */
+ setreg = filterpc(pc, jmptarget);
+ break;
+ }
+ }
+ return setreg;
+}
+
+
+static const char *getobjname (Proto *p, int lastpc, int reg,
+ const char **name) {
+ int pc;
+ *name = luaF_getlocalname(p, reg + 1, lastpc);
+ if (*name) /* is a local? */
+ return "local";
+ /* else try symbolic execution */
+ pc = findsetreg(p, lastpc, reg);
+ if (pc != -1) { /* could find instruction? */
+ Instruction i = p->code[pc];
+ OpCode op = GET_OPCODE(i);
+ switch (op) {
+ case OP_MOVE: {
+ int b = GETARG_B(i); /* move from 'b' to 'a' */
+ if (b < GETARG_A(i))
+ return getobjname(p, pc, b, name); /* get name for 'b' */
+ break;
+ }
+ case OP_GETTABUP:
+ case OP_GETTABLE: {
+ int k = GETARG_C(i); /* key index */
+ int t = GETARG_B(i); /* table index */
+ const char *vn = (op == OP_GETTABLE) /* name of indexed variable */
+ ? luaF_getlocalname(p, t + 1, pc)
+ : upvalname(p, t);
+ kname(p, pc, k, name);
+ return (vn && strcmp(vn, LUA_ENV) == 0) ? "global" : "field";
+ }
+ case OP_GETUPVAL: {
+ *name = upvalname(p, GETARG_B(i));
+ return "upvalue";
+ }
+ case OP_LOADK:
+ case OP_LOADKX: {
+ int b = (op == OP_LOADK) ? GETARG_Bx(i)
+ : GETARG_Ax(p->code[pc + 1]);
+ if (ttisstring(&p->k[b])) {
+ *name = svalue(&p->k[b]);
+ return "constant";
+ }
+ break;
+ }
+ case OP_SELF: {
+ int k = GETARG_C(i); /* key index */
+ kname(p, pc, k, name);
+ return "method";
+ }
+ default: break; /* go through to return NULL */
+ }
+ }
+ return NULL; /* could not find reasonable name */
+}
+
+
+/*
+** Try to find a name for a function based on the code that called it.
+** (Only works when the function was called by a Lua function.)
+** Returns what the name is (e.g., "for iterator", "method",
+** "metamethod") and sets '*name' to point to the name.
+*/
+static const char *funcnamefromcode (lua_State *L, CallInfo *ci,
+ const char **name) {
+ TMS tm = (TMS)0; /* (initial value avoids warnings) */
+ Proto *p = ci_func(ci)->p; /* calling function */
+ int pc = currentpc(ci); /* calling instruction index */
+ Instruction i = p->code[pc]; /* calling instruction */
+ if (ci->callstatus & CIST_HOOKED) { /* was it called inside a hook? */
+ *name = "?";
+ return "hook";
+ }
+ switch (GET_OPCODE(i)) {
+ case OP_CALL:
+ case OP_TAILCALL:
+ return getobjname(p, pc, GETARG_A(i), name); /* get function name */
+ case OP_TFORCALL: { /* for iterator */
+ *name = "for iterator";
+ return "for iterator";
+ }
+ /* other instructions can do calls through metamethods */
+ case OP_SELF: case OP_GETTABUP: case OP_GETTABLE:
+ tm = TM_INDEX;
+ break;
+ case OP_SETTABUP: case OP_SETTABLE:
+ tm = TM_NEWINDEX;
+ break;
+ case OP_ADD: case OP_SUB: case OP_MUL: case OP_MOD:
+ case OP_POW: case OP_DIV: case OP_IDIV: case OP_BAND:
+ case OP_BOR: case OP_BXOR: case OP_SHL: case OP_SHR: {
+ int offset = cast_int(GET_OPCODE(i)) - cast_int(OP_ADD); /* ORDER OP */
+ tm = cast(TMS, offset + cast_int(TM_ADD)); /* ORDER TM */
+ break;
+ }
+ case OP_UNM: tm = TM_UNM; break;
+ case OP_BNOT: tm = TM_BNOT; break;
+ case OP_LEN: tm = TM_LEN; break;
+ case OP_CONCAT: tm = TM_CONCAT; break;
+ case OP_EQ: tm = TM_EQ; break;
+ case OP_LT: tm = TM_LT; break;
+ case OP_LE: tm = TM_LE; break;
+ default:
+ return NULL; /* cannot find a reasonable name */
+ }
+ *name = getstr(G(L)->tmname[tm]);
+ return "metamethod";
+}
+
+/* }====================================================== */
+
+
+
+/*
+** The subtraction of two potentially unrelated pointers is
+** not ISO C, but it should not crash a program; the subsequent
+** checks are ISO C and ensure a correct result.
+*/
+static int isinstack (CallInfo *ci, const TValue *o) {
+ ptrdiff_t i = o - ci->u.l.base;
+ return (0 <= i && i < (ci->top - ci->u.l.base) && ci->u.l.base + i == o);
+}
+
+
+/*
+** Checks whether value 'o' came from an upvalue. (That can only happen
+** with instructions OP_GETTABUP/OP_SETTABUP, which operate directly on
+** upvalues.)
+*/
+static const char *getupvalname (CallInfo *ci, const TValue *o,
+ const char **name) {
+ LClosure *c = ci_func(ci);
+ int i;
+ for (i = 0; i < c->nupvalues; i++) {
+ if (c->upvals[i]->v == o) {
+ *name = upvalname(c->p, i);
+ return "upvalue";
+ }
+ }
+ return NULL;
+}
+
+
+static const char *varinfo (lua_State *L, const TValue *o) {
+ const char *name = NULL; /* to avoid warnings */
+ CallInfo *ci = L->ci;
+ const char *kind = NULL;
+ if (isLua(ci)) {
+ kind = getupvalname(ci, o, &name); /* check whether 'o' is an upvalue */
+ if (!kind && isinstack(ci, o)) /* no? try a register */
+ kind = getobjname(ci_func(ci)->p, currentpc(ci),
+ cast_int(o - ci->u.l.base), &name);
+ }
+ return (kind) ? luaO_pushfstring(L, " (%s '%s')", kind, name) : "";
+}
+
+
+l_noret luaG_typeerror (lua_State *L, const TValue *o, const char *op) {
+ const char *t = luaT_objtypename(L, o);
+ luaG_runerror(L, "attempt to %s a %s value%s", op, t, varinfo(L, o));
+}
+
+
+l_noret luaG_concaterror (lua_State *L, const TValue *p1, const TValue *p2) {
+ if (ttisstring(p1) || cvt2str(p1)) p1 = p2;
+ luaG_typeerror(L, p1, "concatenate");
+}
+
+
+l_noret luaG_opinterror (lua_State *L, const TValue *p1,
+ const TValue *p2, const char *msg) {
+ lua_Number temp;
+ if (!tonumber(p1, &temp)) /* first operand is wrong? */
+ p2 = p1; /* now second is wrong */
+ luaG_typeerror(L, p2, msg);
+}
+
+
+/*
+** Error when both values are convertible to numbers, but not to integers
+*/
+l_noret luaG_tointerror (lua_State *L, const TValue *p1, const TValue *p2) {
+ lua_Integer temp;
+ if (!tointeger(p1, &temp))
+ p2 = p1;
+ luaG_runerror(L, "number%s has no integer representation", varinfo(L, p2));
+}
+
+
+l_noret luaG_ordererror (lua_State *L, const TValue *p1, const TValue *p2) {
+ const char *t1 = luaT_objtypename(L, p1);
+ const char *t2 = luaT_objtypename(L, p2);
+ if (strcmp(t1, t2) == 0)
+ luaG_runerror(L, "attempt to compare two %s values", t1);
+ else
+ luaG_runerror(L, "attempt to compare %s with %s", t1, t2);
+}
+
+
+/* add src:line information to 'msg' */
+const char *luaG_addinfo (lua_State *L, const char *msg, TString *src,
+ int line) {
+ char buff[LUA_IDSIZE];
+ if (src)
+ luaO_chunkid(buff, getstr(src), LUA_IDSIZE);
+ else { /* no source available; use "?" instead */
+ buff[0] = '?'; buff[1] = '\0';
+ }
+ return luaO_pushfstring(L, "%s:%d: %s", buff, line, msg);
+}
+
+
+l_noret luaG_errormsg (lua_State *L) {
+ if (L->errfunc != 0) { /* is there an error handling function? */
+ StkId errfunc = restorestack(L, L->errfunc);
+ setobjs2s(L, L->top, L->top - 1); /* move argument */
+ setobjs2s(L, L->top - 1, errfunc); /* push function */
+ L->top++; /* assume EXTRA_STACK */
+ luaD_callnoyield(L, L->top - 2, 1); /* call it */
+ }
+ luaD_throw(L, LUA_ERRRUN);
+}
+
+
+l_noret luaG_runerror (lua_State *L, const char *fmt, ...) {
+ CallInfo *ci = L->ci;
+ const char *msg;
+ va_list argp;
+ va_start(argp, fmt);
+ msg = luaO_pushvfstring(L, fmt, argp); /* format message */
+ va_end(argp);
+ if (isLua(ci)) /* if Lua function, add source:line information */
+ luaG_addinfo(L, msg, ci_func(ci)->p->source, currentline(ci));
+ luaG_errormsg(L);
+}
+
+
+void luaG_traceexec (lua_State *L) {
+ CallInfo *ci = L->ci;
+ lu_byte mask = L->hookmask;
+ int counthook = (--L->hookcount == 0 && (mask & LUA_MASKCOUNT));
+ if (counthook)
+ resethookcount(L); /* reset count */
+ else if (!(mask & LUA_MASKLINE))
+ return; /* no line hook and count != 0; nothing to be done */
+ if (ci->callstatus & CIST_HOOKYIELD) { /* called hook last time? */
+ ci->callstatus &= ~CIST_HOOKYIELD; /* erase mark */
+ return; /* do not call hook again (VM yielded, so it did not move) */
+ }
+ if (counthook)
+ luaD_hook(L, LUA_HOOKCOUNT, -1); /* call count hook */
+ if (mask & LUA_MASKLINE) {
+ Proto *p = ci_func(ci)->p;
+ int npc = pcRel(ci->u.l.savedpc, p);
+ int newline = getfuncline(p, npc);
+ if (npc == 0 || /* call line hook when entering a new function, */
+ ci->u.l.savedpc <= L->oldpc || /* when jumping back (a loop), or when */
+ newline != getfuncline(p, pcRel(L->oldpc, p))) /* entering a new line */
+ luaD_hook(L, LUA_HOOKLINE, newline); /* call line hook */
+ }
+ L->oldpc = ci->u.l.savedpc;
+ if (L->status == LUA_YIELD) { /* did hook yield? */
+ if (counthook)
+ L->hookcount = 1; /* undo decrement to zero */
+ ci->u.l.savedpc--; /* undo increment (resume will increment it again) */
+ ci->callstatus |= CIST_HOOKYIELD; /* mark that it yielded */
+ ci->func = L->top - 1; /* protect stack below results */
+ luaD_throw(L, LUA_YIELD);
+ }
+}
+
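
The debug interface implemented in ldebug.c above is reached from the host through the public hook API. A minimal host-side sketch (illustrative only, not part of this patch; it assumes a lua_State that the host has already created and opened with the standard libraries):

#include <stdio.h>
#include "lua.h"
#include "lauxlib.h"

/* line hook: print source:line and the first active local at each line */
static void line_hook (lua_State *L, lua_Debug *ar) {
  if (lua_getinfo(L, "Sl", ar)) {              /* fill 'short_src'/'currentline' */
    const char *name = lua_getlocal(L, ar, 1); /* pushes the value if found */
    printf("%s:%d  local 1: %s\n", ar->short_src, ar->currentline,
           name ? name : "(none)");
    if (name) lua_pop(L, 1);                   /* drop the pushed value */
  }
}

int run_traced (lua_State *L, const char *chunk) {  /* helper name is illustrative */
  lua_sethook(L, line_hook, LUA_MASKLINE, 0);  /* luaG_traceexec invokes the hook */
  return luaL_dostring(L, chunk);
}
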
diff --git a/lua/src/ldebug.h b/lua/src/ldebug.h
new file mode 100644
index 000000000..0e31546b1
--- /dev/null
+++ b/lua/src/ldebug.h
@@ -0,0 +1,39 @@
+/*
+** $Id: ldebug.h,v 2.14 2015/05/22 17:45:56 roberto Exp $
+** Auxiliary functions from Debug Interface module
+** See Copyright Notice in lua.h
+*/
+
+#ifndef ldebug_h
+#define ldebug_h
+
+
+#include "lstate.h"
+
+
+#define pcRel(pc, p) (cast(int, (pc) - (p)->code) - 1)
+
+#define getfuncline(f,pc) (((f)->lineinfo) ? (f)->lineinfo[pc] : -1)
+
+#define resethookcount(L) (L->hookcount = L->basehookcount)
+
+
+LUAI_FUNC l_noret luaG_typeerror (lua_State *L, const TValue *o,
+ const char *opname);
+LUAI_FUNC l_noret luaG_concaterror (lua_State *L, const TValue *p1,
+ const TValue *p2);
+LUAI_FUNC l_noret luaG_opinterror (lua_State *L, const TValue *p1,
+ const TValue *p2,
+ const char *msg);
+LUAI_FUNC l_noret luaG_tointerror (lua_State *L, const TValue *p1,
+ const TValue *p2);
+LUAI_FUNC l_noret luaG_ordererror (lua_State *L, const TValue *p1,
+ const TValue *p2);
+LUAI_FUNC l_noret luaG_runerror (lua_State *L, const char *fmt, ...);
+LUAI_FUNC const char *luaG_addinfo (lua_State *L, const char *msg,
+ TString *src, int line);
+LUAI_FUNC l_noret luaG_errormsg (lua_State *L);
+LUAI_FUNC void luaG_traceexec (lua_State *L);
+
+
+#endif
diff --git a/lua/src/ldo.c b/lua/src/ldo.c
new file mode 100644
index 000000000..90b695fb0
--- /dev/null
+++ b/lua/src/ldo.c
@@ -0,0 +1,802 @@
+/*
+** $Id: ldo.c,v 2.157 2016/12/13 15:52:21 roberto Exp $
+** Stack and Call structure of Lua
+** See Copyright Notice in lua.h
+*/
+
+#define ldo_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <setjmp.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lapi.h"
+#include "ldebug.h"
+#include "ldo.h"
+#include "lfunc.h"
+#include "lgc.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lopcodes.h"
+#include "lparser.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "ltm.h"
+#include "lundump.h"
+#include "lvm.h"
+#include "lzio.h"
+
+
+
+#define errorstatus(s) ((s) > LUA_YIELD)
+
+
+/*
+** {======================================================
+** Error-recovery functions
+** =======================================================
+*/
+
+/*
+** LUAI_THROW/LUAI_TRY define how Lua does exception handling. By
+** default, Lua handles errors with exceptions when compiling as
+** C++ code, with _longjmp/_setjmp when asked to use them, and with
+** longjmp/setjmp otherwise.
+*/
+#if !defined(LUAI_THROW) /* { */
+
+#if defined(__cplusplus) && !defined(LUA_USE_LONGJMP) /* { */
+
+/* C++ exceptions */
+#define LUAI_THROW(L,c) throw(c)
+#define LUAI_TRY(L,c,a) \
+ try { a } catch(...) { if ((c)->status == 0) (c)->status = -1; }
+#define luai_jmpbuf int /* dummy variable */
+
+#elif defined(LUA_USE_POSIX) /* }{ */
+
+/* in POSIX, try _longjmp/_setjmp (more efficient) */
+#define LUAI_THROW(L,c) _longjmp((c)->b, 1)
+#define LUAI_TRY(L,c,a) if (_setjmp((c)->b) == 0) { a }
+#define luai_jmpbuf jmp_buf
+
+#else /* }{ */
+
+/* ISO C handling with long jumps */
+#define LUAI_THROW(L,c) longjmp((c)->b, 1)
+#define LUAI_TRY(L,c,a) if (setjmp((c)->b) == 0) { a }
+#define luai_jmpbuf jmp_buf
+
+#endif /* } */
+
+#endif /* } */
+
+
+
+/* chain list of long jump buffers */
+struct lua_longjmp {
+ struct lua_longjmp *previous;
+ luai_jmpbuf b;
+ volatile int status; /* error code */
+};
+
+
+static void seterrorobj (lua_State *L, int errcode, StkId oldtop) {
+ switch (errcode) {
+ case LUA_ERRMEM: { /* memory error? */
+ setsvalue2s(L, oldtop, G(L)->memerrmsg); /* reuse preregistered msg. */
+ break;
+ }
+ case LUA_ERRERR: {
+ setsvalue2s(L, oldtop, luaS_newliteral(L, "error in error handling"));
+ break;
+ }
+ default: {
+ setobjs2s(L, oldtop, L->top - 1); /* error message on current top */
+ break;
+ }
+ }
+ L->top = oldtop + 1;
+}
+
+
+l_noret luaD_throw (lua_State *L, int errcode) {
+ if (L->errorJmp) { /* thread has an error handler? */
+ L->errorJmp->status = errcode; /* set status */
+ LUAI_THROW(L, L->errorJmp); /* jump to it */
+ }
+ else { /* thread has no error handler */
+ global_State *g = G(L);
+ L->status = cast_byte(errcode); /* mark it as dead */
+ if (g->mainthread->errorJmp) { /* main thread has a handler? */
+ setobjs2s(L, g->mainthread->top++, L->top - 1); /* copy error obj. */
+ luaD_throw(g->mainthread, errcode); /* re-throw in main thread */
+ }
+ else { /* no handler at all; abort */
+ if (g->panic) { /* panic function? */
+ seterrorobj(L, errcode, L->top); /* assume EXTRA_STACK */
+ if (L->ci->top < L->top)
+ L->ci->top = L->top; /* pushing msg. can break this invariant */
+ lua_unlock(L);
+ g->panic(L); /* call panic function (last chance to jump out) */
+ }
+ abort();
+ }
+ }
+}
+
+
+int luaD_rawrunprotected (lua_State *L, Pfunc f, void *ud) {
+ unsigned short oldnCcalls = L->nCcalls;
+ struct lua_longjmp lj;
+ lj.status = LUA_OK;
+ lj.previous = L->errorJmp; /* chain new error handler */
+ L->errorJmp = &lj;
+ LUAI_TRY(L, &lj,
+ (*f)(L, ud);
+ );
+ L->errorJmp = lj.previous; /* restore old error handler */
+ L->nCcalls = oldnCcalls;
+ return lj.status;
+}
+
+/* }====================================================== */
+
+
+/*
+** {==================================================================
+** Stack reallocation
+** ===================================================================
+*/
+static void correctstack (lua_State *L, TValue *oldstack) {
+ CallInfo *ci;
+ UpVal *up;
+ L->top = (L->top - oldstack) + L->stack;
+ for (up = L->openupval; up != NULL; up = up->u.open.next)
+ up->v = (up->v - oldstack) + L->stack;
+ for (ci = L->ci; ci != NULL; ci = ci->previous) {
+ ci->top = (ci->top - oldstack) + L->stack;
+ ci->func = (ci->func - oldstack) + L->stack;
+ if (isLua(ci))
+ ci->u.l.base = (ci->u.l.base - oldstack) + L->stack;
+ }
+}
+
+
+/* some space for error handling */
+#define ERRORSTACKSIZE (LUAI_MAXSTACK + 200)
+
+
+void luaD_reallocstack (lua_State *L, int newsize) {
+ TValue *oldstack = L->stack;
+ int lim = L->stacksize;
+ lua_assert(newsize <= LUAI_MAXSTACK || newsize == ERRORSTACKSIZE);
+ lua_assert(L->stack_last - L->stack == L->stacksize - EXTRA_STACK);
+ luaM_reallocvector(L, L->stack, L->stacksize, newsize, TValue);
+ for (; lim < newsize; lim++)
+ setnilvalue(L->stack + lim); /* erase new segment */
+ L->stacksize = newsize;
+ L->stack_last = L->stack + newsize - EXTRA_STACK;
+ correctstack(L, oldstack);
+}
+
+
+void luaD_growstack (lua_State *L, int n) {
+ int size = L->stacksize;
+ if (size > LUAI_MAXSTACK) /* error after extra size? */
+ luaD_throw(L, LUA_ERRERR);
+ else {
+ int needed = cast_int(L->top - L->stack) + n + EXTRA_STACK;
+ int newsize = 2 * size;
+ if (newsize > LUAI_MAXSTACK) newsize = LUAI_MAXSTACK;
+ if (newsize < needed) newsize = needed;
+ if (newsize > LUAI_MAXSTACK) { /* stack overflow? */
+ luaD_reallocstack(L, ERRORSTACKSIZE);
+ luaG_runerror(L, "stack overflow");
+ }
+ else
+ luaD_reallocstack(L, newsize);
+ }
+}
+
+
+static int stackinuse (lua_State *L) {
+ CallInfo *ci;
+ StkId lim = L->top;
+ for (ci = L->ci; ci != NULL; ci = ci->previous) {
+ if (lim < ci->top) lim = ci->top;
+ }
+ lua_assert(lim <= L->stack_last);
+ return cast_int(lim - L->stack) + 1; /* part of stack in use */
+}
+
+
+void luaD_shrinkstack (lua_State *L) {
+ int inuse = stackinuse(L);
+ int goodsize = inuse + (inuse / 8) + 2*EXTRA_STACK;
+ if (goodsize > LUAI_MAXSTACK)
+ goodsize = LUAI_MAXSTACK; /* respect stack limit */
+ if (L->stacksize > LUAI_MAXSTACK) /* had been handling stack overflow? */
+ luaE_freeCI(L); /* free all CIs (list grew because of an error) */
+ else
+ luaE_shrinkCI(L); /* shrink list */
+ /* if thread is currently not handling a stack overflow and its
+ good size is smaller than current size, shrink its stack */
+ if (inuse <= (LUAI_MAXSTACK - EXTRA_STACK) &&
+ goodsize < L->stacksize)
+ luaD_reallocstack(L, goodsize);
+ else /* don't change stack */
+ condmovestack(L,{},{}); /* (change only for debugging) */
+}
+
+
+void luaD_inctop (lua_State *L) {
+ luaD_checkstack(L, 1);
+ L->top++;
+}
+
+/* }================================================================== */
+
+
+/*
+** Call a hook for the given event. Make sure there is a hook to be
+** called. (Both 'L->hook' and 'L->hookmask', which triggers this
+** function, can be changed asynchronously by signals.)
+*/
+void luaD_hook (lua_State *L, int event, int line) {
+ lua_Hook hook = L->hook;
+ if (hook && L->allowhook) { /* make sure there is a hook */
+ CallInfo *ci = L->ci;
+ ptrdiff_t top = savestack(L, L->top);
+ ptrdiff_t ci_top = savestack(L, ci->top);
+ lua_Debug ar;
+ ar.event = event;
+ ar.currentline = line;
+ ar.i_ci = ci;
+ luaD_checkstack(L, LUA_MINSTACK); /* ensure minimum stack size */
+ ci->top = L->top + LUA_MINSTACK;
+ lua_assert(ci->top <= L->stack_last);
+ L->allowhook = 0; /* cannot call hooks inside a hook */
+ ci->callstatus |= CIST_HOOKED;
+ lua_unlock(L);
+ (*hook)(L, &ar);
+ lua_lock(L);
+ lua_assert(!L->allowhook);
+ L->allowhook = 1;
+ ci->top = restorestack(L, ci_top);
+ L->top = restorestack(L, top);
+ ci->callstatus &= ~CIST_HOOKED;
+ }
+}
+
+
+static void callhook (lua_State *L, CallInfo *ci) {
+ int hook = LUA_HOOKCALL;
+ ci->u.l.savedpc++; /* hooks assume 'pc' is already incremented */
+ if (isLua(ci->previous) &&
+ GET_OPCODE(*(ci->previous->u.l.savedpc - 1)) == OP_TAILCALL) {
+ ci->callstatus |= CIST_TAIL;
+ hook = LUA_HOOKTAILCALL;
+ }
+ luaD_hook(L, hook, -1);
+ ci->u.l.savedpc--; /* correct 'pc' */
+}
+
+
+static StkId adjust_varargs (lua_State *L, Proto *p, int actual) {
+ int i;
+ int nfixargs = p->numparams;
+ StkId base, fixed;
+ /* move fixed parameters to final position */
+ fixed = L->top - actual; /* first fixed argument */
+ base = L->top; /* final position of first argument */
+ for (i = 0; i < nfixargs && i < actual; i++) {
+ setobjs2s(L, L->top++, fixed + i);
+ setnilvalue(fixed + i); /* erase original copy (for GC) */
+ }
+ for (; i < nfixargs; i++)
+ setnilvalue(L->top++); /* complete missing arguments */
+ return base;
+}
+
+
+/*
+** Check whether __call metafield of 'func' is a function. If so, put
+** it in stack below original 'func' so that 'luaD_precall' can call
+** it. Raise an error if __call metafield is not a function.
+*/
+static void tryfuncTM (lua_State *L, StkId func) {
+ const TValue *tm = luaT_gettmbyobj(L, func, TM_CALL);
+ StkId p;
+ if (!ttisfunction(tm))
+ luaG_typeerror(L, func, "call");
+ /* Open a hole inside the stack at 'func' */
+ for (p = L->top; p > func; p--)
+ setobjs2s(L, p, p-1);
+ L->top++; /* slot ensured by caller */
+ setobj2s(L, func, tm); /* tag method is the new function to be called */
+}
+
+
+/*
+** Given 'nres' results at 'firstResult', move 'wanted' of them to 'res'.
+** Handle most typical cases (zero results for commands, one result for
+** expressions, multiple results for tail calls/single parameters)
+** separately.
+*/
+static int moveresults (lua_State *L, const TValue *firstResult, StkId res,
+ int nres, int wanted) {
+ switch (wanted) { /* handle typical cases separately */
+ case 0: break; /* nothing to move */
+ case 1: { /* one result needed */
+ if (nres == 0) /* no results? */
+ firstResult = luaO_nilobject; /* adjust with nil */
+ setobjs2s(L, res, firstResult); /* move it to proper place */
+ break;
+ }
+ case LUA_MULTRET: {
+ int i;
+ for (i = 0; i < nres; i++) /* move all results to correct place */
+ setobjs2s(L, res + i, firstResult + i);
+ L->top = res + nres;
+ return 0; /* wanted == LUA_MULTRET */
+ }
+ default: {
+ int i;
+ if (wanted <= nres) { /* enough results? */
+ for (i = 0; i < wanted; i++) /* move wanted results to correct place */
+ setobjs2s(L, res + i, firstResult + i);
+ }
+ else { /* not enough results; use all of them plus nils */
+ for (i = 0; i < nres; i++) /* move all results to correct place */
+ setobjs2s(L, res + i, firstResult + i);
+ for (; i < wanted; i++) /* complete wanted number of results */
+ setnilvalue(res + i);
+ }
+ break;
+ }
+ }
+ L->top = res + wanted; /* top points after the last result */
+ return 1;
+}
+
+
+/*
+** Finishes a function call: calls hook if necessary, removes CallInfo,
+** moves current number of results to proper place; returns 0 iff call
+** wanted multiple (variable number of) results.
+*/
+int luaD_poscall (lua_State *L, CallInfo *ci, StkId firstResult, int nres) {
+ StkId res;
+ int wanted = ci->nresults;
+ if (L->hookmask & (LUA_MASKRET | LUA_MASKLINE)) {
+ if (L->hookmask & LUA_MASKRET) {
+ ptrdiff_t fr = savestack(L, firstResult); /* hook may change stack */
+ luaD_hook(L, LUA_HOOKRET, -1);
+ firstResult = restorestack(L, fr);
+ }
+ L->oldpc = ci->previous->u.l.savedpc; /* 'oldpc' for caller function */
+ }
+ res = ci->func; /* res == final position of 1st result */
+ L->ci = ci->previous; /* back to caller */
+ /* move results to proper place */
+ return moveresults(L, firstResult, res, nres, wanted);
+}
+
+
+
+#define next_ci(L) (L->ci = (L->ci->next ? L->ci->next : luaE_extendCI(L)))
+
+
+/* macro to check stack size, preserving 'p' */
+#define checkstackp(L,n,p) \
+ luaD_checkstackaux(L, n, \
+ ptrdiff_t t__ = savestack(L, p); /* save 'p' */ \
+ luaC_checkGC(L), /* stack grow uses memory */ \
+ p = restorestack(L, t__)) /* 'pos' part: restore 'p' */
+
+
+/*
+** Prepares a function call: checks the stack, creates a new CallInfo
+** entry, fills in the relevant information, calls hook if needed.
+** If function is a C function, does the call, too. (Otherwise, leave
+** the execution ('luaV_execute') to the caller, to allow stackless
+** calls.) Returns true iff function has been executed (C function).
+*/
+int luaD_precall (lua_State *L, StkId func, int nresults) {
+ lua_CFunction f;
+ CallInfo *ci;
+ switch (ttype(func)) {
+ case LUA_TCCL: /* C closure */
+ f = clCvalue(func)->f;
+ goto Cfunc;
+ case LUA_TLCF: /* light C function */
+ f = fvalue(func);
+ Cfunc: {
+ int n; /* number of returns */
+ checkstackp(L, LUA_MINSTACK, func); /* ensure minimum stack size */
+ ci = next_ci(L); /* now 'enter' new function */
+ ci->nresults = nresults;
+ ci->func = func;
+ ci->top = L->top + LUA_MINSTACK;
+ lua_assert(ci->top <= L->stack_last);
+ ci->callstatus = 0;
+ if (L->hookmask & LUA_MASKCALL)
+ luaD_hook(L, LUA_HOOKCALL, -1);
+ lua_unlock(L);
+ n = (*f)(L); /* do the actual call */
+ lua_lock(L);
+ api_checknelems(L, n);
+ luaD_poscall(L, ci, L->top - n, n);
+ return 1;
+ }
+ case LUA_TLCL: { /* Lua function: prepare its call */
+ StkId base;
+ Proto *p = clLvalue(func)->p;
+ int n = cast_int(L->top - func) - 1; /* number of real arguments */
+ int fsize = p->maxstacksize; /* frame size */
+ checkstackp(L, fsize, func);
+ if (p->is_vararg)
+ base = adjust_varargs(L, p, n);
+ else { /* non vararg function */
+ for (; n < p->numparams; n++)
+ setnilvalue(L->top++); /* complete missing arguments */
+ base = func + 1;
+ }
+ ci = next_ci(L); /* now 'enter' new function */
+ ci->nresults = nresults;
+ ci->func = func;
+ ci->u.l.base = base;
+ L->top = ci->top = base + fsize;
+ lua_assert(ci->top <= L->stack_last);
+ ci->u.l.savedpc = p->code; /* starting point */
+ ci->callstatus = CIST_LUA;
+ if (L->hookmask & LUA_MASKCALL)
+ callhook(L, ci);
+ return 0;
+ }
+ default: { /* not a function */
+ checkstackp(L, 1, func); /* ensure space for metamethod */
+ tryfuncTM(L, func); /* try to get '__call' metamethod */
+ return luaD_precall(L, func, nresults); /* now it must be a function */
+ }
+ }
+}
+
+
+/*
+** Check appropriate error for stack overflow ("regular" overflow or
+** overflow while handling stack overflow). If 'nCalls' is larger than
+** LUAI_MAXCCALLS (which means it is handling a "regular" overflow) but
+** smaller than 9/8 of LUAI_MAXCCALLS, does not report an error (to
+** allow overflow handling to work)
+*/
+static void stackerror (lua_State *L) {
+ if (L->nCcalls == LUAI_MAXCCALLS)
+ luaG_runerror(L, "C stack overflow");
+ else if (L->nCcalls >= (LUAI_MAXCCALLS + (LUAI_MAXCCALLS>>3)))
+ luaD_throw(L, LUA_ERRERR); /* error while handling stack error */
+}
+
+
+/*
+** Call a function (C or Lua). The function to be called is at *func.
+** The arguments are on the stack, right after the function.
+** When it returns, all the results are on the stack, starting at the original
+** function position.
+*/
+void luaD_call (lua_State *L, StkId func, int nResults) {
+ if (++L->nCcalls >= LUAI_MAXCCALLS)
+ stackerror(L);
+ if (!luaD_precall(L, func, nResults)) /* is a Lua function? */
+ luaV_execute(L); /* call it */
+ L->nCcalls--;
+}
+
+
+/*
+** Similar to 'luaD_call', but does not allow yields during the call
+*/
+void luaD_callnoyield (lua_State *L, StkId func, int nResults) {
+ L->nny++;
+ luaD_call(L, func, nResults);
+ L->nny--;
+}
+
+
+/*
+** Completes the execution of an interrupted C function, calling its
+** continuation function.
+*/
+static void finishCcall (lua_State *L, int status) {
+ CallInfo *ci = L->ci;
+ int n;
+ /* must have a continuation and must be able to call it */
+ lua_assert(ci->u.c.k != NULL && L->nny == 0);
+ /* error status can only happen in a protected call */
+ lua_assert((ci->callstatus & CIST_YPCALL) || status == LUA_YIELD);
+ if (ci->callstatus & CIST_YPCALL) { /* was inside a pcall? */
+ ci->callstatus &= ~CIST_YPCALL; /* continuation is also inside it */
+ L->errfunc = ci->u.c.old_errfunc; /* with the same error function */
+ }
+ /* finish 'lua_callk'/'lua_pcall'; CIST_YPCALL and 'errfunc' already
+ handled */
+ adjustresults(L, ci->nresults);
+ lua_unlock(L);
+ n = (*ci->u.c.k)(L, status, ci->u.c.ctx); /* call continuation function */
+ lua_lock(L);
+ api_checknelems(L, n);
+ luaD_poscall(L, ci, L->top - n, n); /* finish 'luaD_precall' */
+}
+
+
+/*
+** Executes "full continuation" (everything in the stack) of a
+** previously interrupted coroutine until the stack is empty (or another
+** interruption long-jumps out of the loop). If the coroutine is
+** recovering from an error, 'ud' points to the error status, which must
+** be passed to the first continuation function (otherwise the default
+** status is LUA_YIELD).
+*/
+static void unroll (lua_State *L, void *ud) {
+ if (ud != NULL) /* error status? */
+ finishCcall(L, *(int *)ud); /* finish 'lua_pcallk' callee */
+ while (L->ci != &L->base_ci) { /* something in the stack */
+ if (!isLua(L->ci)) /* C function? */
+ finishCcall(L, LUA_YIELD); /* complete its execution */
+ else { /* Lua function */
+ luaV_finishOp(L); /* finish interrupted instruction */
+ luaV_execute(L); /* execute down to higher C 'boundary' */
+ }
+ }
+}
+
+
+/*
+** Try to find a suspended protected call (a "recover point") for the
+** given thread.
+*/
+static CallInfo *findpcall (lua_State *L) {
+ CallInfo *ci;
+ for (ci = L->ci; ci != NULL; ci = ci->previous) { /* search for a pcall */
+ if (ci->callstatus & CIST_YPCALL)
+ return ci;
+ }
+ return NULL; /* no pending pcall */
+}
+
+
+/*
+** Recovers from an error in a coroutine. Finds a recover point (if
+** there is one) and completes the execution of the interrupted
+** 'luaD_pcall'. If there is no recover point, returns zero.
+*/
+static int recover (lua_State *L, int status) {
+ StkId oldtop;
+ CallInfo *ci = findpcall(L);
+ if (ci == NULL) return 0; /* no recovery point */
+ /* "finish" luaD_pcall */
+ oldtop = restorestack(L, ci->extra);
+ luaF_close(L, oldtop);
+ seterrorobj(L, status, oldtop);
+ L->ci = ci;
+ L->allowhook = getoah(ci->callstatus); /* restore original 'allowhook' */
+ L->nny = 0; /* should be zero to be yieldable */
+ luaD_shrinkstack(L);
+ L->errfunc = ci->u.c.old_errfunc;
+ return 1; /* continue running the coroutine */
+}
+
+
+/*
+** Signal an error in the call to 'lua_resume', not in the execution
+** of the coroutine itself. (Such errors should not be handled by any
+** coroutine error handler and should not kill the coroutine.)
+*/
+static int resume_error (lua_State *L, const char *msg, int narg) {
+ L->top -= narg; /* remove args from the stack */
+ setsvalue2s(L, L->top, luaS_new(L, msg)); /* push error message */
+ api_incr_top(L);
+ lua_unlock(L);
+ return LUA_ERRRUN;
+}
+
+
+/*
+** Do the work for 'lua_resume' in protected mode. Most of the work
+** depends on the status of the coroutine: initial state, suspended
+** inside a hook, or regularly suspended (optionally with a continuation
+** function), plus erroneous cases: non-suspended coroutine or dead
+** coroutine.
+*/
+static void resume (lua_State *L, void *ud) {
+ int n = *(cast(int*, ud)); /* number of arguments */
+ StkId firstArg = L->top - n; /* first argument */
+ CallInfo *ci = L->ci;
+ if (L->status == LUA_OK) { /* starting a coroutine? */
+ if (!luaD_precall(L, firstArg - 1, LUA_MULTRET)) /* Lua function? */
+ luaV_execute(L); /* call it */
+ }
+ else { /* resuming from previous yield */
+ lua_assert(L->status == LUA_YIELD);
+ L->status = LUA_OK; /* mark that it is running (again) */
+ ci->func = restorestack(L, ci->extra);
+ if (isLua(ci)) /* yielded inside a hook? */
+ luaV_execute(L); /* just continue running Lua code */
+ else { /* 'common' yield */
+ if (ci->u.c.k != NULL) { /* does it have a continuation function? */
+ lua_unlock(L);
+ n = (*ci->u.c.k)(L, LUA_YIELD, ci->u.c.ctx); /* call continuation */
+ lua_lock(L);
+ api_checknelems(L, n);
+ firstArg = L->top - n; /* yield results come from continuation */
+ }
+ luaD_poscall(L, ci, firstArg, n); /* finish 'luaD_precall' */
+ }
+ unroll(L, NULL); /* run continuation */
+ }
+}
+
+
+LUA_API int lua_resume (lua_State *L, lua_State *from, int nargs) {
+ int status;
+ unsigned short oldnny = L->nny; /* save "number of non-yieldable" calls */
+ lua_lock(L);
+ if (L->status == LUA_OK) { /* may be starting a coroutine */
+ if (L->ci != &L->base_ci) /* not in base level? */
+ return resume_error(L, "cannot resume non-suspended coroutine", nargs);
+ }
+ else if (L->status != LUA_YIELD)
+ return resume_error(L, "cannot resume dead coroutine", nargs);
+ L->nCcalls = (from) ? from->nCcalls + 1 : 1;
+ if (L->nCcalls >= LUAI_MAXCCALLS)
+ return resume_error(L, "C stack overflow", nargs);
+ luai_userstateresume(L, nargs);
+ L->nny = 0; /* allow yields */
+ api_checknelems(L, (L->status == LUA_OK) ? nargs + 1 : nargs);
+ status = luaD_rawrunprotected(L, resume, &nargs);
+ if (status == -1) /* error calling 'lua_resume'? */
+ status = LUA_ERRRUN;
+ else { /* continue running after recoverable errors */
+ while (errorstatus(status) && recover(L, status)) {
+ /* unroll continuation */
+ status = luaD_rawrunprotected(L, unroll, &status);
+ }
+ if (errorstatus(status)) { /* unrecoverable error? */
+ L->status = cast_byte(status); /* mark thread as 'dead' */
+ seterrorobj(L, status, L->top); /* push error message */
+ L->ci->top = L->top;
+ }
+ else lua_assert(status == L->status); /* normal end or yield */
+ }
+ L->nny = oldnny; /* restore 'nny' */
+ L->nCcalls--;
+ lua_assert(L->nCcalls == ((from) ? from->nCcalls : 0));
+ lua_unlock(L);
+ return status;
+}
+
+
+LUA_API int lua_isyieldable (lua_State *L) {
+ return (L->nny == 0);
+}
+
+
+LUA_API int lua_yieldk (lua_State *L, int nresults, lua_KContext ctx,
+ lua_KFunction k) {
+ CallInfo *ci = L->ci;
+ luai_userstateyield(L, nresults);
+ lua_lock(L);
+ api_checknelems(L, nresults);
+ if (L->nny > 0) {
+ if (L != G(L)->mainthread)
+ luaG_runerror(L, "attempt to yield across a C-call boundary");
+ else
+ luaG_runerror(L, "attempt to yield from outside a coroutine");
+ }
+ L->status = LUA_YIELD;
+ ci->extra = savestack(L, ci->func); /* save current 'func' */
+ if (isLua(ci)) { /* inside a hook? */
+ api_check(L, k == NULL, "hooks cannot continue after yielding");
+ }
+ else {
+ if ((ci->u.c.k = k) != NULL) /* is there a continuation? */
+ ci->u.c.ctx = ctx; /* save context */
+ ci->func = L->top - nresults - 1; /* protect stack below results */
+ luaD_throw(L, LUA_YIELD);
+ }
+ lua_assert(ci->callstatus & CIST_HOOKED); /* must be inside a hook */
+ lua_unlock(L);
+ return 0; /* return to 'luaD_hook' */
+}
+
+
+int luaD_pcall (lua_State *L, Pfunc func, void *u,
+ ptrdiff_t old_top, ptrdiff_t ef) {
+ int status;
+ CallInfo *old_ci = L->ci;
+ lu_byte old_allowhooks = L->allowhook;
+ unsigned short old_nny = L->nny;
+ ptrdiff_t old_errfunc = L->errfunc;
+ L->errfunc = ef;
+ status = luaD_rawrunprotected(L, func, u);
+ if (status != LUA_OK) { /* an error occurred? */
+ StkId oldtop = restorestack(L, old_top);
+ luaF_close(L, oldtop); /* close possible pending closures */
+ seterrorobj(L, status, oldtop);
+ L->ci = old_ci;
+ L->allowhook = old_allowhooks;
+ L->nny = old_nny;
+ luaD_shrinkstack(L);
+ }
+ L->errfunc = old_errfunc;
+ return status;
+}
+
+
+
+/*
+** Execute a protected parser.
+*/
+struct SParser { /* data to 'f_parser' */
+ ZIO *z;
+ Mbuffer buff; /* dynamic structure used by the scanner */
+ Dyndata dyd; /* dynamic structures used by the parser */
+ const char *mode;
+ const char *name;
+};
+
+
+static void checkmode (lua_State *L, const char *mode, const char *x) {
+ if (mode && strchr(mode, x[0]) == NULL) {
+ luaO_pushfstring(L,
+ "attempt to load a %s chunk (mode is '%s')", x, mode);
+ luaD_throw(L, LUA_ERRSYNTAX);
+ }
+}
+
+
+static void f_parser (lua_State *L, void *ud) {
+ LClosure *cl;
+ struct SParser *p = cast(struct SParser *, ud);
+ int c = zgetc(p->z); /* read first character */
+ if (c == LUA_SIGNATURE[0]) {
+ checkmode(L, p->mode, "binary");
+ cl = luaU_undump(L, p->z, p->name);
+ }
+ else {
+ checkmode(L, p->mode, "text");
+ cl = luaY_parser(L, p->z, &p->buff, &p->dyd, p->name, c);
+ }
+ lua_assert(cl->nupvalues == cl->p->sizeupvalues);
+ luaF_initupvals(L, cl);
+}
+
+
+int luaD_protectedparser (lua_State *L, ZIO *z, const char *name,
+ const char *mode) {
+ struct SParser p;
+ int status;
+ L->nny++; /* cannot yield during parsing */
+ p.z = z; p.name = name; p.mode = mode;
+ p.dyd.actvar.arr = NULL; p.dyd.actvar.size = 0;
+ p.dyd.gt.arr = NULL; p.dyd.gt.size = 0;
+ p.dyd.label.arr = NULL; p.dyd.label.size = 0;
+ luaZ_initbuffer(L, &p.buff);
+ status = luaD_pcall(L, f_parser, &p, savestack(L, L->top), L->errfunc);
+ luaZ_freebuffer(L, &p.buff);
+ luaM_freearray(L, p.dyd.actvar.arr, p.dyd.actvar.size);
+ luaM_freearray(L, p.dyd.gt.arr, p.dyd.gt.size);
+ luaM_freearray(L, p.dyd.label.arr, p.dyd.label.size);
+ L->nny--;
+ return status;
+}
+
+
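
The resume/yield machinery in ldo.c above is driven through lua_resume and lua_yieldk. A minimal sketch of running a coroutine from C (illustrative only, not part of this patch; it assumes the standard libraries are open so that coroutine.yield exists):

#include <stdio.h>
#include "lua.h"
#include "lauxlib.h"

static void run_coroutine (lua_State *L) {
  lua_State *co = lua_newthread(L);    /* new coroutine; also pushed onto L */
  int status;
  luaL_loadstring(co, "coroutine.yield(1); coroutine.yield(2); return 3");
  do {
    status = lua_resume(co, L, 0);     /* run until the next yield or return */
    if (status == LUA_OK || status == LUA_YIELD)
      printf("got %d\n", (int)lua_tointeger(co, -1));
    else
      printf("error: %s\n", lua_tostring(co, -1));
    lua_settop(co, 0);                 /* clear results before resuming again */
  } while (status == LUA_YIELD);
}
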
diff --git a/lua/src/ldo.h b/lua/src/ldo.h
new file mode 100644
index 000000000..4f5d51c3c
--- /dev/null
+++ b/lua/src/ldo.h
@@ -0,0 +1,58 @@
+/*
+** $Id: ldo.h,v 2.29 2015/12/21 13:02:14 roberto Exp $
+** Stack and Call structure of Lua
+** See Copyright Notice in lua.h
+*/
+
+#ifndef ldo_h
+#define ldo_h
+
+
+#include "lobject.h"
+#include "lstate.h"
+#include "lzio.h"
+
+
+/*
+** Macro to check stack size and grow stack if needed. Parameters
+** 'pre'/'pos' allow the macro to preserve a pointer into the
+** stack across reallocations, doing the work only when needed.
+** 'condmovestack' is used in heavy tests to force a stack reallocation
+** at every check.
+*/
+#define luaD_checkstackaux(L,n,pre,pos) \
+ if (L->stack_last - L->top <= (n)) \
+ { pre; luaD_growstack(L, n); pos; } else { condmovestack(L,pre,pos); }
+
+/* In general, 'pre'/'pos' are empty (nothing to save) */
+#define luaD_checkstack(L,n) luaD_checkstackaux(L,n,(void)0,(void)0)
+
+
+
+#define savestack(L,p) ((char *)(p) - (char *)L->stack)
+#define restorestack(L,n) ((TValue *)((char *)L->stack + (n)))
+
+
+/* type of protected functions, to be run by 'runprotected' */
+typedef void (*Pfunc) (lua_State *L, void *ud);
+
+LUAI_FUNC int luaD_protectedparser (lua_State *L, ZIO *z, const char *name,
+ const char *mode);
+LUAI_FUNC void luaD_hook (lua_State *L, int event, int line);
+LUAI_FUNC int luaD_precall (lua_State *L, StkId func, int nresults);
+LUAI_FUNC void luaD_call (lua_State *L, StkId func, int nResults);
+LUAI_FUNC void luaD_callnoyield (lua_State *L, StkId func, int nResults);
+LUAI_FUNC int luaD_pcall (lua_State *L, Pfunc func, void *u,
+ ptrdiff_t oldtop, ptrdiff_t ef);
+LUAI_FUNC int luaD_poscall (lua_State *L, CallInfo *ci, StkId firstResult,
+ int nres);
+LUAI_FUNC void luaD_reallocstack (lua_State *L, int newsize);
+LUAI_FUNC void luaD_growstack (lua_State *L, int n);
+LUAI_FUNC void luaD_shrinkstack (lua_State *L);
+LUAI_FUNC void luaD_inctop (lua_State *L);
+
+LUAI_FUNC l_noret luaD_throw (lua_State *L, int errcode);
+LUAI_FUNC int luaD_rawrunprotected (lua_State *L, Pfunc f, void *ud);
+
+#endif
+
diff --git a/lua/src/ldump.c b/lua/src/ldump.c
new file mode 100644
index 000000000..016e30082
--- /dev/null
+++ b/lua/src/ldump.c
@@ -0,0 +1,215 @@
+/*
+** $Id: ldump.c,v 2.37 2015/10/08 15:53:49 roberto Exp $
+** save precompiled Lua chunks
+** See Copyright Notice in lua.h
+*/
+
+#define ldump_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <stddef.h>
+
+#include "lua.h"
+
+#include "lobject.h"
+#include "lstate.h"
+#include "lundump.h"
+
+
+typedef struct {
+ lua_State *L;
+ lua_Writer writer;
+ void *data;
+ int strip;
+ int status;
+} DumpState;
+
+
+/*
+** All high-level dumps go through DumpVector; you can change it to
+** change the endianness of the result
+*/
+#define DumpVector(v,n,D) DumpBlock(v,(n)*sizeof((v)[0]),D)
+
+#define DumpLiteral(s,D) DumpBlock(s, sizeof(s) - sizeof(char), D)
+
+
+static void DumpBlock (const void *b, size_t size, DumpState *D) {
+ if (D->status == 0 && size > 0) {
+ lua_unlock(D->L);
+ D->status = (*D->writer)(D->L, b, size, D->data);
+ lua_lock(D->L);
+ }
+}
+
+
+#define DumpVar(x,D) DumpVector(&x,1,D)
+
+
+static void DumpByte (int y, DumpState *D) {
+ lu_byte x = (lu_byte)y;
+ DumpVar(x, D);
+}
+
+
+static void DumpInt (int x, DumpState *D) {
+ DumpVar(x, D);
+}
+
+
+static void DumpNumber (lua_Number x, DumpState *D) {
+ DumpVar(x, D);
+}
+
+
+static void DumpInteger (lua_Integer x, DumpState *D) {
+ DumpVar(x, D);
+}
+
+
+static void DumpString (const TString *s, DumpState *D) {
+ if (s == NULL)
+ DumpByte(0, D);
+ else {
+ size_t size = tsslen(s) + 1; /* include trailing '\0' */
+ const char *str = getstr(s);
+ if (size < 0xFF)
+ DumpByte(cast_int(size), D);
+ else {
+ DumpByte(0xFF, D);
+ DumpVar(size, D);
+ }
+ DumpVector(str, size - 1, D); /* no need to save '\0' */
+ }
+}
+
+
+static void DumpCode (const Proto *f, DumpState *D) {
+ DumpInt(f->sizecode, D);
+ DumpVector(f->code, f->sizecode, D);
+}
+
+
+static void DumpFunction(const Proto *f, TString *psource, DumpState *D);
+
+static void DumpConstants (const Proto *f, DumpState *D) {
+ int i;
+ int n = f->sizek;
+ DumpInt(n, D);
+ for (i = 0; i < n; i++) {
+ const TValue *o = &f->k[i];
+ DumpByte(ttype(o), D);
+ switch (ttype(o)) {
+ case LUA_TNIL:
+ break;
+ case LUA_TBOOLEAN:
+ DumpByte(bvalue(o), D);
+ break;
+ case LUA_TNUMFLT:
+ DumpNumber(fltvalue(o), D);
+ break;
+ case LUA_TNUMINT:
+ DumpInteger(ivalue(o), D);
+ break;
+ case LUA_TSHRSTR:
+ case LUA_TLNGSTR:
+ DumpString(tsvalue(o), D);
+ break;
+ default:
+ lua_assert(0);
+ }
+ }
+}
+
+
+static void DumpProtos (const Proto *f, DumpState *D) {
+ int i;
+ int n = f->sizep;
+ DumpInt(n, D);
+ for (i = 0; i < n; i++)
+ DumpFunction(f->p[i], f->source, D);
+}
+
+
+static void DumpUpvalues (const Proto *f, DumpState *D) {
+ int i, n = f->sizeupvalues;
+ DumpInt(n, D);
+ for (i = 0; i < n; i++) {
+ DumpByte(f->upvalues[i].instack, D);
+ DumpByte(f->upvalues[i].idx, D);
+ }
+}
+
+
+static void DumpDebug (const Proto *f, DumpState *D) {
+ int i, n;
+ n = (D->strip) ? 0 : f->sizelineinfo;
+ DumpInt(n, D);
+ DumpVector(f->lineinfo, n, D);
+ n = (D->strip) ? 0 : f->sizelocvars;
+ DumpInt(n, D);
+ for (i = 0; i < n; i++) {
+ DumpString(f->locvars[i].varname, D);
+ DumpInt(f->locvars[i].startpc, D);
+ DumpInt(f->locvars[i].endpc, D);
+ }
+ n = (D->strip) ? 0 : f->sizeupvalues;
+ DumpInt(n, D);
+ for (i = 0; i < n; i++)
+ DumpString(f->upvalues[i].name, D);
+}
+
+
+static void DumpFunction (const Proto *f, TString *psource, DumpState *D) {
+ if (D->strip || f->source == psource)
+ DumpString(NULL, D); /* no debug info or same source as its parent */
+ else
+ DumpString(f->source, D);
+ DumpInt(f->linedefined, D);
+ DumpInt(f->lastlinedefined, D);
+ DumpByte(f->numparams, D);
+ DumpByte(f->is_vararg, D);
+ DumpByte(f->maxstacksize, D);
+ DumpCode(f, D);
+ DumpConstants(f, D);
+ DumpUpvalues(f, D);
+ DumpProtos(f, D);
+ DumpDebug(f, D);
+}
+
+
+static void DumpHeader (DumpState *D) {
+ DumpLiteral(LUA_SIGNATURE, D);
+ DumpByte(LUAC_VERSION, D);
+ DumpByte(LUAC_FORMAT, D);
+ DumpLiteral(LUAC_DATA, D);
+ DumpByte(sizeof(int), D);
+ DumpByte(sizeof(size_t), D);
+ DumpByte(sizeof(Instruction), D);
+ DumpByte(sizeof(lua_Integer), D);
+ DumpByte(sizeof(lua_Number), D);
+ DumpInteger(LUAC_INT, D);
+ DumpNumber(LUAC_NUM, D);
+}
+
+
+/*
+** dump Lua function as precompiled chunk
+*/
+int luaU_dump(lua_State *L, const Proto *f, lua_Writer w, void *data,
+ int strip) {
+ DumpState D;
+ D.L = L;
+ D.writer = w;
+ D.data = data;
+ D.strip = strip;
+ D.status = 0;
+ DumpHeader(&D);
+ DumpByte(f->sizeupvalues, &D);
+ DumpFunction(f, NULL, &D);
+ return D.status;
+}
+
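
luaU_dump above is exposed through lua_dump, which streams the precompiled chunk to a caller-supplied writer. A minimal sketch that writes a chunk to a file (illustrative only, not part of this patch; the helper and file names are examples):

#include <stdio.h>
#include "lua.h"
#include "lauxlib.h"

static int file_writer (lua_State *L, const void *p, size_t sz, void *ud) {
  (void)L;  /* unused */
  return fwrite(p, 1, sz, (FILE *)ud) != sz;   /* nonzero signals an error */
}

static int dump_chunk (lua_State *L, const char *src, const char *outname) {
  FILE *f;
  int status;
  if (luaL_loadstring(L, src) != LUA_OK) return -1;  /* compile only */
  f = fopen(outname, "wb");
  if (f == NULL) { lua_pop(L, 1); return -1; }
  status = lua_dump(L, file_writer, f, 0);  /* strip = 0 keeps debug info */
  fclose(f);
  lua_pop(L, 1);                            /* remove the compiled function */
  return status;
}
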
diff --git a/lua/src/lfunc.c b/lua/src/lfunc.c
new file mode 100644
index 000000000..67967dab3
--- /dev/null
+++ b/lua/src/lfunc.c
@@ -0,0 +1,151 @@
+/*
+** $Id: lfunc.c,v 2.45 2014/11/02 19:19:04 roberto Exp $
+** Auxiliary functions to manipulate prototypes and closures
+** See Copyright Notice in lua.h
+*/
+
+#define lfunc_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <stddef.h>
+
+#include "lua.h"
+
+#include "lfunc.h"
+#include "lgc.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lstate.h"
+
+
+
+CClosure *luaF_newCclosure (lua_State *L, int n) {
+ GCObject *o = luaC_newobj(L, LUA_TCCL, sizeCclosure(n));
+ CClosure *c = gco2ccl(o);
+ c->nupvalues = cast_byte(n);
+ return c;
+}
+
+
+LClosure *luaF_newLclosure (lua_State *L, int n) {
+ GCObject *o = luaC_newobj(L, LUA_TLCL, sizeLclosure(n));
+ LClosure *c = gco2lcl(o);
+ c->p = NULL;
+ c->nupvalues = cast_byte(n);
+ while (n--) c->upvals[n] = NULL;
+ return c;
+}
+
+/*
+** fill a closure with new closed upvalues
+*/
+void luaF_initupvals (lua_State *L, LClosure *cl) {
+ int i;
+ for (i = 0; i < cl->nupvalues; i++) {
+ UpVal *uv = luaM_new(L, UpVal);
+ uv->refcount = 1;
+ uv->v = &uv->u.value; /* make it closed */
+ setnilvalue(uv->v);
+ cl->upvals[i] = uv;
+ }
+}
+
+
+UpVal *luaF_findupval (lua_State *L, StkId level) {
+ UpVal **pp = &L->openupval;
+ UpVal *p;
+ UpVal *uv;
+ lua_assert(isintwups(L) || L->openupval == NULL);
+ while (*pp != NULL && (p = *pp)->v >= level) {
+ lua_assert(upisopen(p));
+ if (p->v == level) /* found a corresponding upvalue? */
+ return p; /* return it */
+ pp = &p->u.open.next;
+ }
+ /* not found: create a new upvalue */
+ uv = luaM_new(L, UpVal);
+ uv->refcount = 0;
+ uv->u.open.next = *pp; /* link it to list of open upvalues */
+ uv->u.open.touched = 1;
+ *pp = uv;
+ uv->v = level; /* current value lives in the stack */
+ if (!isintwups(L)) { /* thread not in list of threads with upvalues? */
+ L->twups = G(L)->twups; /* link it to the list */
+ G(L)->twups = L;
+ }
+ return uv;
+}
+
+
+void luaF_close (lua_State *L, StkId level) {
+ UpVal *uv;
+ while (L->openupval != NULL && (uv = L->openupval)->v >= level) {
+ lua_assert(upisopen(uv));
+ L->openupval = uv->u.open.next; /* remove from 'open' list */
+ if (uv->refcount == 0) /* no references? */
+ luaM_free(L, uv); /* free upvalue */
+ else {
+ setobj(L, &uv->u.value, uv->v); /* move value to upvalue slot */
+ uv->v = &uv->u.value; /* now current value lives here */
+ luaC_upvalbarrier(L, uv);
+ }
+ }
+}
+
+
+Proto *luaF_newproto (lua_State *L) {
+ GCObject *o = luaC_newobj(L, LUA_TPROTO, sizeof(Proto));
+ Proto *f = gco2p(o);
+ f->k = NULL;
+ f->sizek = 0;
+ f->p = NULL;
+ f->sizep = 0;
+ f->code = NULL;
+ f->cache = NULL;
+ f->sizecode = 0;
+ f->lineinfo = NULL;
+ f->sizelineinfo = 0;
+ f->upvalues = NULL;
+ f->sizeupvalues = 0;
+ f->numparams = 0;
+ f->is_vararg = 0;
+ f->maxstacksize = 0;
+ f->locvars = NULL;
+ f->sizelocvars = 0;
+ f->linedefined = 0;
+ f->lastlinedefined = 0;
+ f->source = NULL;
+ return f;
+}
+
+
+void luaF_freeproto (lua_State *L, Proto *f) {
+ luaM_freearray(L, f->code, f->sizecode);
+ luaM_freearray(L, f->p, f->sizep);
+ luaM_freearray(L, f->k, f->sizek);
+ luaM_freearray(L, f->lineinfo, f->sizelineinfo);
+ luaM_freearray(L, f->locvars, f->sizelocvars);
+ luaM_freearray(L, f->upvalues, f->sizeupvalues);
+ luaM_free(L, f);
+}
+
+
+/*
+** Look for n-th local variable at line 'line' in function 'func'.
+** Returns NULL if not found.
+*/
+const char *luaF_getlocalname (const Proto *f, int local_number, int pc) {
+ int i;
+ for (i = 0; i<f->sizelocvars && f->locvars[i].startpc <= pc; i++) {
+ if (pc < f->locvars[i].endpc) { /* is variable active? */
+ local_number--;
+ if (local_number == 0)
+ return getstr(f->locvars[i].varname);
+ }
+ }
+ return NULL; /* not found */
+}
+
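
The closure and upvalue plumbing in lfunc.c above backs both Lua closures and C closures. A minimal sketch of a C closure that keeps its state in an upvalue (illustrative only, not part of this patch; the function names are examples):

#include "lua.h"

/* each call increments and returns the counter stored in upvalue 1 */
static int counter (lua_State *L) {
  lua_Integer n = lua_tointeger(L, lua_upvalueindex(1)) + 1;
  lua_pushinteger(L, n);
  lua_replace(L, lua_upvalueindex(1));   /* update the upvalue */
  lua_pushinteger(L, n);
  return 1;
}

void register_counter (lua_State *L) {
  lua_pushinteger(L, 0);                 /* initial upvalue value */
  lua_pushcclosure(L, counter, 1);       /* allocated via luaF_newCclosure */
  lua_setglobal(L, "counter");
}
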
diff --git a/lua/src/lfunc.h b/lua/src/lfunc.h
new file mode 100644
index 000000000..2eeb0d5a4
--- /dev/null
+++ b/lua/src/lfunc.h
@@ -0,0 +1,61 @@
+/*
+** $Id: lfunc.h,v 2.15 2015/01/13 15:49:11 roberto Exp $
+** Auxiliary functions to manipulate prototypes and closures
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lfunc_h
+#define lfunc_h
+
+
+#include "lobject.h"
+
+
+#define sizeCclosure(n) (cast(int, sizeof(CClosure)) + \
+ cast(int, sizeof(TValue)*((n)-1)))
+
+#define sizeLclosure(n) (cast(int, sizeof(LClosure)) + \
+ cast(int, sizeof(TValue *)*((n)-1)))
+
+
+/* test whether thread is in 'twups' list */
+#define isintwups(L) (L->twups != L)
+
+
+/*
+** maximum number of upvalues in a closure (both C and Lua). (Value
+** must fit in a VM register.)
+*/
+#define MAXUPVAL 255
+
+
+/*
+** Upvalues for Lua closures
+*/
+struct UpVal {
+ TValue *v; /* points to stack or to its own value */
+ lu_mem refcount; /* reference counter */
+ union {
+ struct { /* (when open) */
+ UpVal *next; /* linked list */
+ int touched; /* mark to avoid cycles with dead threads */
+ } open;
+ TValue value; /* the value (when closed) */
+ } u;
+};
+
+#define upisopen(up) ((up)->v != &(up)->u.value)
+
+
+LUAI_FUNC Proto *luaF_newproto (lua_State *L);
+LUAI_FUNC CClosure *luaF_newCclosure (lua_State *L, int nelems);
+LUAI_FUNC LClosure *luaF_newLclosure (lua_State *L, int nelems);
+LUAI_FUNC void luaF_initupvals (lua_State *L, LClosure *cl);
+LUAI_FUNC UpVal *luaF_findupval (lua_State *L, StkId level);
+LUAI_FUNC void luaF_close (lua_State *L, StkId level);
+LUAI_FUNC void luaF_freeproto (lua_State *L, Proto *f);
+LUAI_FUNC const char *luaF_getlocalname (const Proto *func, int local_number,
+ int pc);
+
+
+#endif
diff --git a/lua/src/lgc.c b/lua/src/lgc.c
new file mode 100644
index 000000000..ba2c19e14
--- /dev/null
+++ b/lua/src/lgc.c
@@ -0,0 +1,1178 @@
+/*
+** $Id: lgc.c,v 2.215 2016/12/22 13:08:50 roberto Exp $
+** Garbage Collector
+** See Copyright Notice in lua.h
+*/
+
+#define lgc_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <string.h>
+
+#include "lua.h"
+
+#include "ldebug.h"
+#include "ldo.h"
+#include "lfunc.h"
+#include "lgc.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "ltm.h"
+
+
+/*
+** internal state for collector while inside the atomic phase. The
+** collector should never be in this state while running regular code.
+*/
+#define GCSinsideatomic (GCSpause + 1)
+
+/*
+** cost of sweeping one element (the size of a small object divided
+** by some adjustment for the sweep speed)
+*/
+#define GCSWEEPCOST ((sizeof(TString) + 4) / 4)
+
+/* maximum number of elements to sweep in each single step */
+#define GCSWEEPMAX (cast_int((GCSTEPSIZE / GCSWEEPCOST) / 4))
+
+/* cost of calling one finalizer */
+#define GCFINALIZECOST GCSWEEPCOST
+
+
+/*
+** macro to adjust 'stepmul': 'stepmul' is actually used like
+** 'stepmul / STEPMULADJ' (value chosen by tests)
+*/
+#define STEPMULADJ 200
+
+
+/*
+** macro to adjust 'pause': 'pause' is actually used like
+** 'pause / PAUSEADJ' (value chosen by tests)
+*/
+#define PAUSEADJ 100
+
+
+/*
+** 'makewhite' erases all color bits then sets only the current white
+** bit
+*/
+#define maskcolors (~(bitmask(BLACKBIT) | WHITEBITS))
+#define makewhite(g,x) \
+ (x->marked = cast_byte((x->marked & maskcolors) | luaC_white(g)))
+
+#define white2gray(x) resetbits(x->marked, WHITEBITS)
+#define black2gray(x) resetbit(x->marked, BLACKBIT)
+
+
+#define valiswhite(x) (iscollectable(x) && iswhite(gcvalue(x)))
+
+#define checkdeadkey(n) lua_assert(!ttisdeadkey(gkey(n)) || ttisnil(gval(n)))
+
+
+#define checkconsistency(obj) \
+ lua_longassert(!iscollectable(obj) || righttt(obj))
+
+
+#define markvalue(g,o) { checkconsistency(o); \
+ if (valiswhite(o)) reallymarkobject(g,gcvalue(o)); }
+
+#define markobject(g,t) { if (iswhite(t)) reallymarkobject(g, obj2gco(t)); }
+
+/*
+** mark an object that can be NULL (either because it is really optional,
+** or it was stripped as debug info, or inside an uncompleted structure)
+*/
+#define markobjectN(g,t) { if (t) markobject(g,t); }
+
+static void reallymarkobject (global_State *g, GCObject *o);
+
+
+/*
+** {======================================================
+** Generic functions
+** =======================================================
+*/
+
+
+/*
+** one after last element in a hash array
+*/
+#define gnodelast(h) gnode(h, cast(size_t, sizenode(h)))
+
+
+/*
+** link collectable object 'o' into list pointed by 'p'
+*/
+#define linkgclist(o,p) ((o)->gclist = (p), (p) = obj2gco(o))
+
+
+/*
+** If key is not marked, mark its entry as dead. This allows key to be
+** collected, but keeps its entry in the table. A dead node is needed
+** when Lua looks up a key (it may be part of a chain) and when
+** traversing a weak table (key might be removed from the table during
+** traversal). Other places never manipulate dead keys, because its
+** associated nil value is enough to signal that the entry is logically
+** empty.
+*/
+static void removeentry (Node *n) {
+ lua_assert(ttisnil(gval(n)));
+ if (valiswhite(gkey(n)))
+ setdeadvalue(wgkey(n)); /* unused and unmarked key; remove it */
+}
+
+
+/*
+** tells whether a key or value can be cleared from a weak
+** table. Non-collectable objects are never removed from weak
+** tables. Strings behave as 'values', so they are never removed either. For
+** other objects: if really collected, they cannot be kept; for objects
+** being finalized, keep them in keys, but not in values
+*/
+static int iscleared (global_State *g, const TValue *o) {
+ if (!iscollectable(o)) return 0;
+ else if (ttisstring(o)) {
+ markobject(g, tsvalue(o)); /* strings are 'values', so are never weak */
+ return 0;
+ }
+ else return iswhite(gcvalue(o));
+}
+
+
+/*
+** barrier that moves collector forward, that is, mark the white object
+** being pointed by a black object. (If in sweep phase, clear the black
+** object to white [sweep it] to avoid other barrier calls for this
+** same object.)
+*/
+void luaC_barrier_ (lua_State *L, GCObject *o, GCObject *v) {
+ global_State *g = G(L);
+ lua_assert(isblack(o) && iswhite(v) && !isdead(g, v) && !isdead(g, o));
+ if (keepinvariant(g)) /* must keep invariant? */
+ reallymarkobject(g, v); /* restore invariant */
+ else { /* sweep phase */
+ lua_assert(issweepphase(g));
+ makewhite(g, o); /* mark main obj. as white to avoid other barriers */
+ }
+}
+
+
+/*
+** barrier that moves collector backward, that is, mark the black object
+** pointing to a white object as gray again.
+*/
+void luaC_barrierback_ (lua_State *L, Table *t) {
+ global_State *g = G(L);
+ lua_assert(isblack(t) && !isdead(g, t));
+ black2gray(t); /* make table gray (again) */
+ linkgclist(t, g->grayagain);
+}
+
+
+/*
+** barrier for assignments to closed upvalues. Because upvalues are
+** shared among closures, it is impossible to know the color of all
+** closures pointing to it. So, we assume that the object being assigned
+** must be marked.
+*/
+void luaC_upvalbarrier_ (lua_State *L, UpVal *uv) {
+ global_State *g = G(L);
+ GCObject *o = gcvalue(uv->v);
+ lua_assert(!upisopen(uv)); /* ensured by macro luaC_upvalbarrier */
+ if (keepinvariant(g))
+ markobject(g, o);
+}
+
+
+void luaC_fix (lua_State *L, GCObject *o) {
+ global_State *g = G(L);
+ lua_assert(g->allgc == o); /* object must be 1st in 'allgc' list! */
+ white2gray(o); /* they will be gray forever */
+ g->allgc = o->next; /* remove object from 'allgc' list */
+ o->next = g->fixedgc; /* link it to 'fixedgc' list */
+ g->fixedgc = o;
+}
+
+
+/*
+** create a new collectable object (with given type and size) and link
+** it to 'allgc' list.
+*/
+GCObject *luaC_newobj (lua_State *L, int tt, size_t sz) {
+ global_State *g = G(L);
+ GCObject *o = cast(GCObject *, luaM_newobject(L, novariant(tt), sz));
+ o->marked = luaC_white(g);
+ o->tt = tt;
+ o->next = g->allgc;
+ g->allgc = o;
+ return o;
+}
+
+/* }====================================================== */
+
+
+
+/*
+** {======================================================
+** Mark functions
+** =======================================================
+*/
+
+
+/*
+** mark an object. Userdata, strings, and closed upvalues are visited
+** and turned black here. Other objects are marked gray and added to the
+** appropriate gray list, to be visited (and turned black) later. (Open
+** upvalues point into a thread's stack, so their values are marked when
+** that thread is traversed or in 'remarkupvals'.)
+*/
+static void reallymarkobject (global_State *g, GCObject *o) {
+ reentry:
+ white2gray(o);
+ switch (o->tt) {
+ case LUA_TSHRSTR: {
+ gray2black(o);
+ g->GCmemtrav += sizelstring(gco2ts(o)->shrlen);
+ break;
+ }
+ case LUA_TLNGSTR: {
+ gray2black(o);
+ g->GCmemtrav += sizelstring(gco2ts(o)->u.lnglen);
+ break;
+ }
+ case LUA_TUSERDATA: {
+ TValue uvalue;
+ markobjectN(g, gco2u(o)->metatable); /* mark its metatable */
+ gray2black(o);
+ g->GCmemtrav += sizeudata(gco2u(o));
+ getuservalue(g->mainthread, gco2u(o), &uvalue);
+ if (valiswhite(&uvalue)) { /* markvalue(g, &uvalue); */
+ o = gcvalue(&uvalue);
+ goto reentry;
+ }
+ break;
+ }
+ case LUA_TLCL: {
+ linkgclist(gco2lcl(o), g->gray);
+ break;
+ }
+ case LUA_TCCL: {
+ linkgclist(gco2ccl(o), g->gray);
+ break;
+ }
+ case LUA_TTABLE: {
+ linkgclist(gco2t(o), g->gray);
+ break;
+ }
+ case LUA_TTHREAD: {
+ linkgclist(gco2th(o), g->gray);
+ break;
+ }
+ case LUA_TPROTO: {
+ linkgclist(gco2p(o), g->gray);
+ break;
+ }
+ default: lua_assert(0); break;
+ }
+}
+
+
+/*
+** mark metatables for basic types
+*/
+static void markmt (global_State *g) {
+ int i;
+ for (i=0; i < LUA_NUMTAGS; i++)
+ markobjectN(g, g->mt[i]);
+}
+
+
+/*
+** mark all objects in the list of objects being finalized
+*/
+static void markbeingfnz (global_State *g) {
+ GCObject *o;
+ for (o = g->tobefnz; o != NULL; o = o->next)
+ markobject(g, o);
+}
+
+
+/*
+** Mark all values stored in marked open upvalues from non-marked threads.
+** (Values from marked threads were already marked when traversing the
+** thread.) Remove from the list threads that no longer have upvalues and
+** not-marked threads.
+*/
+static void remarkupvals (global_State *g) {
+ lua_State *thread;
+ lua_State **p = &g->twups;
+ while ((thread = *p) != NULL) {
+ lua_assert(!isblack(thread)); /* threads are never black */
+ if (isgray(thread) && thread->openupval != NULL)
+ p = &thread->twups; /* keep marked thread with upvalues in the list */
+ else { /* thread is not marked or without upvalues */
+ UpVal *uv;
+ *p = thread->twups; /* remove thread from the list */
+ thread->twups = thread; /* mark that it is out of list */
+ for (uv = thread->openupval; uv != NULL; uv = uv->u.open.next) {
+ if (uv->u.open.touched) {
+ markvalue(g, uv->v); /* remark upvalue's value */
+ uv->u.open.touched = 0;
+ }
+ }
+ }
+ }
+}
+
+
+/*
+** mark root set and reset all gray lists, to start a new collection
+*/
+static void restartcollection (global_State *g) {
+ g->gray = g->grayagain = NULL;
+ g->weak = g->allweak = g->ephemeron = NULL;
+ markobject(g, g->mainthread);
+ markvalue(g, &g->l_registry);
+ markmt(g);
+ markbeingfnz(g); /* mark any finalizing object left from previous cycle */
+}
+
+/* }====================================================== */
+
+
+/*
+** {======================================================
+** Traverse functions
+** =======================================================
+*/
+
+/*
+** Traverse a table with weak values and link it to proper list. During
+** propagate phase, keep it in 'grayagain' list, to be revisited in the
+** atomic phase. In the atomic phase, if table has any white value,
+** put it in 'weak' list, to be cleared.
+*/
+static void traverseweakvalue (global_State *g, Table *h) {
+ Node *n, *limit = gnodelast(h);
+ /* if there is array part, assume it may have white values (it is not
+ worth traversing it now just to check) */
+ int hasclears = (h->sizearray > 0);
+ for (n = gnode(h, 0); n < limit; n++) { /* traverse hash part */
+ checkdeadkey(n);
+ if (ttisnil(gval(n))) /* entry is empty? */
+ removeentry(n); /* remove it */
+ else {
+ lua_assert(!ttisnil(gkey(n)));
+ markvalue(g, gkey(n)); /* mark key */
+ if (!hasclears && iscleared(g, gval(n))) /* is there a white value? */
+ hasclears = 1; /* table will have to be cleared */
+ }
+ }
+ if (g->gcstate == GCSpropagate)
+ linkgclist(h, g->grayagain); /* must retraverse it in atomic phase */
+ else if (hasclears)
+ linkgclist(h, g->weak); /* has to be cleared later */
+}
+
+
+/*
+** Traverse an ephemeron table and link it to proper list. Returns true
+** iff any object was marked during this traversal (which implies that
+** convergence has to continue). During propagation phase, keep table
+** in 'grayagain' list, to be visited again in the atomic phase. In
+** the atomic phase, if table has any white->white entry, it has to
+** be revisited during ephemeron convergence (as that key may turn
+** black). Otherwise, if it has any white key, table has to be cleared
+** (in the atomic phase).
+*/
+static int traverseephemeron (global_State *g, Table *h) {
+ int marked = 0; /* true if an object is marked in this traversal */
+ int hasclears = 0; /* true if table has white keys */
+ int hasww = 0; /* true if table has entry "white-key -> white-value" */
+ Node *n, *limit = gnodelast(h);
+ unsigned int i;
+ /* traverse array part */
+ for (i = 0; i < h->sizearray; i++) {
+ if (valiswhite(&h->array[i])) {
+ marked = 1;
+ reallymarkobject(g, gcvalue(&h->array[i]));
+ }
+ }
+ /* traverse hash part */
+ for (n = gnode(h, 0); n < limit; n++) {
+ checkdeadkey(n);
+ if (ttisnil(gval(n))) /* entry is empty? */
+ removeentry(n); /* remove it */
+ else if (iscleared(g, gkey(n))) { /* key is not marked (yet)? */
+ hasclears = 1; /* table must be cleared */
+ if (valiswhite(gval(n))) /* value not marked yet? */
+ hasww = 1; /* white-white entry */
+ }
+ else if (valiswhite(gval(n))) { /* value not marked yet? */
+ marked = 1;
+ reallymarkobject(g, gcvalue(gval(n))); /* mark it now */
+ }
+ }
+ /* link table into proper list */
+ if (g->gcstate == GCSpropagate)
+ linkgclist(h, g->grayagain); /* must retraverse it in atomic phase */
+ else if (hasww) /* table has white->white entries? */
+ linkgclist(h, g->ephemeron); /* have to propagate again */
+ else if (hasclears) /* table has white keys? */
+ linkgclist(h, g->allweak); /* may have to clean white keys */
+ return marked;
+}
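+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): the
+** observable effect of the two weak-table traversals above, driven from
+** a minimal host program through the public API (a separate translation
+** unit linked against this Lua; the chunk is plain Lua 5.3 code):
+**
+**   #include "lua.h"
+**   #include "lauxlib.h"
+**   #include "lualib.h"
+**
+**   int main (void) {
+**     lua_State *L = luaL_newstate();
+**     luaL_openlibs(L);
+**     luaL_dostring(L,
+**       "local wv = setmetatable({}, {__mode = 'v'})\n"
+**       "wv[1] = {}          -- value has no other reference\n"
+**       "local we = setmetatable({}, {__mode = 'k'})\n"
+**       "do local k = {}; we[k] = {back = k} end  -- value refers to key\n"
+**       "collectgarbage()    -- one full cycle\n"
+**       "print(wv[1])        --> nil   (weak value cleared)\n"
+**       "print(next(we))     --> nil   (ephemeron entry collected)\n");
+**     lua_close(L);
+**     return 0;
+**   }
+*/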
+
+
+static void traversestrongtable (global_State *g, Table *h) {
+ Node *n, *limit = gnodelast(h);
+ unsigned int i;
+ for (i = 0; i < h->sizearray; i++) /* traverse array part */
+ markvalue(g, &h->array[i]);
+ for (n = gnode(h, 0); n < limit; n++) { /* traverse hash part */
+ checkdeadkey(n);
+ if (ttisnil(gval(n))) /* entry is empty? */
+ removeentry(n); /* remove it */
+ else {
+ lua_assert(!ttisnil(gkey(n)));
+ markvalue(g, gkey(n)); /* mark key */
+ markvalue(g, gval(n)); /* mark value */
+ }
+ }
+}
+
+
+static lu_mem traversetable (global_State *g, Table *h) {
+ const char *weakkey, *weakvalue;
+ const TValue *mode = gfasttm(g, h->metatable, TM_MODE);
+ markobjectN(g, h->metatable);
+ if (mode && ttisstring(mode) && /* is there a weak mode? */
+ ((weakkey = strchr(svalue(mode), 'k')),
+ (weakvalue = strchr(svalue(mode), 'v')),
+ (weakkey || weakvalue))) { /* is really weak? */
+ black2gray(h); /* keep table gray */
+ if (!weakkey) /* strong keys? */
+ traverseweakvalue(g, h);
+ else if (!weakvalue) /* strong values? */
+ traverseephemeron(g, h);
+ else /* all weak */
+ linkgclist(h, g->allweak); /* nothing to traverse now */
+ }
+ else /* not weak */
+ traversestrongtable(g, h);
+ return sizeof(Table) + sizeof(TValue) * h->sizearray +
+ sizeof(Node) * cast(size_t, allocsizenode(h));
+}
+
+
+/*
+** Traverse a prototype. (While a prototype is being built, its
+** arrays can be larger than needed; the extra slots are filled with
+** NULL, hence the use of 'markobjectN'.)
+*/
+static int traverseproto (global_State *g, Proto *f) {
+ int i;
+ if (f->cache && iswhite(f->cache))
+ f->cache = NULL; /* allow cache to be collected */
+ markobjectN(g, f->source);
+ for (i = 0; i < f->sizek; i++) /* mark literals */
+ markvalue(g, &f->k[i]);
+ for (i = 0; i < f->sizeupvalues; i++) /* mark upvalue names */
+ markobjectN(g, f->upvalues[i].name);
+ for (i = 0; i < f->sizep; i++) /* mark nested protos */
+ markobjectN(g, f->p[i]);
+ for (i = 0; i < f->sizelocvars; i++) /* mark local-variable names */
+ markobjectN(g, f->locvars[i].varname);
+ return sizeof(Proto) + sizeof(Instruction) * f->sizecode +
+ sizeof(Proto *) * f->sizep +
+ sizeof(TValue) * f->sizek +
+ sizeof(int) * f->sizelineinfo +
+ sizeof(LocVar) * f->sizelocvars +
+ sizeof(Upvaldesc) * f->sizeupvalues;
+}
+
+
+static lu_mem traverseCclosure (global_State *g, CClosure *cl) {
+ int i;
+ for (i = 0; i < cl->nupvalues; i++) /* mark its upvalues */
+ markvalue(g, &cl->upvalue[i]);
+ return sizeCclosure(cl->nupvalues);
+}
+
+/*
+** open upvalues point to values in a thread, so those values should
+** be marked when the thread is traversed except in the atomic phase
+** (because then the value cannot be changed by the thread and the
+** thread may not be traversed again)
+*/
+static lu_mem traverseLclosure (global_State *g, LClosure *cl) {
+ int i;
+ markobjectN(g, cl->p); /* mark its prototype */
+ for (i = 0; i < cl->nupvalues; i++) { /* mark its upvalues */
+ UpVal *uv = cl->upvals[i];
+ if (uv != NULL) {
+ if (upisopen(uv) && g->gcstate != GCSinsideatomic)
+ uv->u.open.touched = 1; /* can be marked in 'remarkupvals' */
+ else
+ markvalue(g, uv->v);
+ }
+ }
+ return sizeLclosure(cl->nupvalues);
+}
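+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): why upvalues
+** get this special treatment. An upvalue such as 'n' below may be shared
+** by several closures, so the collector cannot rely on the closures'
+** colors; while the upvalue is open its value is marked through the owning
+** thread (or in 'remarkupvals'), and once closed it is marked here:
+**
+**   #include "lua.h"
+**   #include "lauxlib.h"
+**   #include "lualib.h"
+**
+**   int main (void) {
+**     lua_State *L = luaL_newstate();
+**     luaL_openlibs(L);
+**     luaL_dostring(L,
+**       "local function counter()\n"
+**       "  local n = 0   -- upvalue shared by the two closures below\n"
+**       "  return function() n = n + 1 end, function() return n end\n"
+**       "end\n"
+**       "local inc, get = counter()\n"
+**       "inc(); inc()\n"
+**       "print(get())    --> 2\n");
+**     lua_close(L);
+**     return 0;
+**   }
+*/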
+
+
+static lu_mem traversethread (global_State *g, lua_State *th) {
+ StkId o = th->stack;
+ if (o == NULL)
+ return 1; /* stack not completely built yet */
+ lua_assert(g->gcstate == GCSinsideatomic ||
+ th->openupval == NULL || isintwups(th));
+ for (; o < th->top; o++) /* mark live elements in the stack */
+ markvalue(g, o);
+ if (g->gcstate == GCSinsideatomic) { /* final traversal? */
+ StkId lim = th->stack + th->stacksize; /* real end of stack */
+ for (; o < lim; o++) /* clear not-marked stack slice */
+ setnilvalue(o);
+ /* 'remarkupvals' may have removed thread from 'twups' list */
+ if (!isintwups(th) && th->openupval != NULL) {
+ th->twups = g->twups; /* link it back to the list */
+ g->twups = th;
+ }
+ }
+ else if (g->gckind != KGC_EMERGENCY)
+ luaD_shrinkstack(th); /* do not change stack in emergency cycle */
+ return (sizeof(lua_State) + sizeof(TValue) * th->stacksize +
+ sizeof(CallInfo) * th->nci);
+}
+
+
+/*
+** traverse one gray object, turning it to black (except for threads,
+** which are always gray).
+*/
+static void propagatemark (global_State *g) {
+ lu_mem size;
+ GCObject *o = g->gray;
+ lua_assert(isgray(o));
+ gray2black(o);
+ switch (o->tt) {
+ case LUA_TTABLE: {
+ Table *h = gco2t(o);
+ g->gray = h->gclist; /* remove from 'gray' list */
+ size = traversetable(g, h);
+ break;
+ }
+ case LUA_TLCL: {
+ LClosure *cl = gco2lcl(o);
+ g->gray = cl->gclist; /* remove from 'gray' list */
+ size = traverseLclosure(g, cl);
+ break;
+ }
+ case LUA_TCCL: {
+ CClosure *cl = gco2ccl(o);
+ g->gray = cl->gclist; /* remove from 'gray' list */
+ size = traverseCclosure(g, cl);
+ break;
+ }
+ case LUA_TTHREAD: {
+ lua_State *th = gco2th(o);
+ g->gray = th->gclist; /* remove from 'gray' list */
+ linkgclist(th, g->grayagain); /* insert into 'grayagain' list */
+ black2gray(o);
+ size = traversethread(g, th);
+ break;
+ }
+ case LUA_TPROTO: {
+ Proto *p = gco2p(o);
+ g->gray = p->gclist; /* remove from 'gray' list */
+ size = traverseproto(g, p);
+ break;
+ }
+ default: lua_assert(0); return;
+ }
+ g->GCmemtrav += size;
+}
+
+
+static void propagateall (global_State *g) {
+ while (g->gray) propagatemark(g);
+}
+
+
+static void convergeephemerons (global_State *g) {
+ int changed;
+ do {
+ GCObject *w;
+ GCObject *next = g->ephemeron; /* get ephemeron list */
+ g->ephemeron = NULL; /* tables may return to this list when traversed */
+ changed = 0;
+ while ((w = next) != NULL) {
+ next = gco2t(w)->gclist;
+ if (traverseephemeron(g, gco2t(w))) { /* traverse marked some value? */
+ propagateall(g); /* propagate changes */
+ changed = 1; /* will have to revisit all ephemeron tables */
+ }
+ }
+ } while (changed);
+}
+
+/* }====================================================== */
+
+
+/*
+** {======================================================
+** Sweep Functions
+** =======================================================
+*/
+
+
+/*
+** clear entries with unmarked keys from all weak tables in list 'l' up
+** to element 'f'
+*/
+static void clearkeys (global_State *g, GCObject *l, GCObject *f) {
+ for (; l != f; l = gco2t(l)->gclist) {
+ Table *h = gco2t(l);
+ Node *n, *limit = gnodelast(h);
+ for (n = gnode(h, 0); n < limit; n++) {
+ if (!ttisnil(gval(n)) && (iscleared(g, gkey(n)))) {
+ setnilvalue(gval(n)); /* remove value ... */
+ removeentry(n); /* and remove entry from table */
+ }
+ }
+ }
+}
+
+
+/*
+** clear entries with unmarked values from all weak tables in list 'l' up
+** to element 'f'
+*/
+static void clearvalues (global_State *g, GCObject *l, GCObject *f) {
+ for (; l != f; l = gco2t(l)->gclist) {
+ Table *h = gco2t(l);
+ Node *n, *limit = gnodelast(h);
+ unsigned int i;
+ for (i = 0; i < h->sizearray; i++) {
+ TValue *o = &h->array[i];
+ if (iscleared(g, o)) /* value was collected? */
+ setnilvalue(o); /* remove value */
+ }
+ for (n = gnode(h, 0); n < limit; n++) {
+ if (!ttisnil(gval(n)) && iscleared(g, gval(n))) {
+ setnilvalue(gval(n)); /* remove value ... */
+ removeentry(n); /* and remove entry from table */
+ }
+ }
+ }
+}
+
+
+void luaC_upvdeccount (lua_State *L, UpVal *uv) {
+ lua_assert(uv->refcount > 0);
+ uv->refcount--;
+ if (uv->refcount == 0 && !upisopen(uv))
+ luaM_free(L, uv);
+}
+
+
+static void freeLclosure (lua_State *L, LClosure *cl) {
+ int i;
+ for (i = 0; i < cl->nupvalues; i++) {
+ UpVal *uv = cl->upvals[i];
+ if (uv)
+ luaC_upvdeccount(L, uv);
+ }
+ luaM_freemem(L, cl, sizeLclosure(cl->nupvalues));
+}
+
+
+static void freeobj (lua_State *L, GCObject *o) {
+ switch (o->tt) {
+ case LUA_TPROTO: luaF_freeproto(L, gco2p(o)); break;
+ case LUA_TLCL: {
+ freeLclosure(L, gco2lcl(o));
+ break;
+ }
+ case LUA_TCCL: {
+ luaM_freemem(L, o, sizeCclosure(gco2ccl(o)->nupvalues));
+ break;
+ }
+ case LUA_TTABLE: luaH_free(L, gco2t(o)); break;
+ case LUA_TTHREAD: luaE_freethread(L, gco2th(o)); break;
+ case LUA_TUSERDATA: luaM_freemem(L, o, sizeudata(gco2u(o))); break;
+ case LUA_TSHRSTR:
+ luaS_remove(L, gco2ts(o)); /* remove it from hash table */
+ luaM_freemem(L, o, sizelstring(gco2ts(o)->shrlen));
+ break;
+ case LUA_TLNGSTR: {
+ luaM_freemem(L, o, sizelstring(gco2ts(o)->u.lnglen));
+ break;
+ }
+ default: lua_assert(0);
+ }
+}
+
+
+#define sweepwholelist(L,p) sweeplist(L,p,MAX_LUMEM)
+static GCObject **sweeplist (lua_State *L, GCObject **p, lu_mem count);
+
+
+/*
+** sweep at most 'count' elements from a list of GCObjects erasing dead
+** objects, where a dead object is one marked with the old (non current)
+** white; change all non-dead objects back to white, preparing for next
+** collection cycle. Return where to continue the traversal or NULL if
+** list is finished.
+*/
+static GCObject **sweeplist (lua_State *L, GCObject **p, lu_mem count) {
+ global_State *g = G(L);
+ int ow = otherwhite(g);
+ int white = luaC_white(g); /* current white */
+ while (*p != NULL && count-- > 0) {
+ GCObject *curr = *p;
+ int marked = curr->marked;
+ if (isdeadm(ow, marked)) { /* is 'curr' dead? */
+ *p = curr->next; /* remove 'curr' from list */
+ freeobj(L, curr); /* erase 'curr' */
+ }
+ else { /* change mark to 'white' */
+ curr->marked = cast_byte((marked & maskcolors) | white);
+ p = &curr->next; /* go to next element */
+ }
+ }
+ return (*p == NULL) ? NULL : p;
+}
+
+
+/*
+** sweep a list until a live object (or end of list)
+*/
+static GCObject **sweeptolive (lua_State *L, GCObject **p) {
+ GCObject **old = p;
+ do {
+ p = sweeplist(L, p, 1);
+ } while (p == old);
+ return p;
+}
+
+/* }====================================================== */
+
+
+/*
+** {======================================================
+** Finalization
+** =======================================================
+*/
+
+/*
+** If possible, shrink string table
+*/
+static void checkSizes (lua_State *L, global_State *g) {
+ if (g->gckind != KGC_EMERGENCY) {
+ l_mem olddebt = g->GCdebt;
+ if (g->strt.nuse < g->strt.size / 4) /* string table too big? */
+ luaS_resize(L, g->strt.size / 2); /* shrink it a little */
+ g->GCestimate += g->GCdebt - olddebt; /* update estimate */
+ }
+}
+
+
+static GCObject *udata2finalize (global_State *g) {
+ GCObject *o = g->tobefnz; /* get first element */
+ lua_assert(tofinalize(o));
+ g->tobefnz = o->next; /* remove it from 'tobefnz' list */
+ o->next = g->allgc; /* return it to 'allgc' list */
+ g->allgc = o;
+ resetbit(o->marked, FINALIZEDBIT); /* object is "normal" again */
+ if (issweepphase(g))
+ makewhite(g, o); /* "sweep" object */
+ return o;
+}
+
+
+static void dothecall (lua_State *L, void *ud) {
+ UNUSED(ud);
+ luaD_callnoyield(L, L->top - 2, 0);
+}
+
+
+static void GCTM (lua_State *L, int propagateerrors) {
+ global_State *g = G(L);
+ const TValue *tm;
+ TValue v;
+ setgcovalue(L, &v, udata2finalize(g));
+ tm = luaT_gettmbyobj(L, &v, TM_GC);
+ if (tm != NULL && ttisfunction(tm)) { /* is there a finalizer? */
+ int status;
+ lu_byte oldah = L->allowhook;
+ int running = g->gcrunning;
+ L->allowhook = 0; /* stop debug hooks during GC metamethod */
+ g->gcrunning = 0; /* avoid GC steps */
+ setobj2s(L, L->top, tm); /* push finalizer... */
+ setobj2s(L, L->top + 1, &v); /* ... and its argument */
+ L->top += 2; /* and (next line) call the finalizer */
+ L->ci->callstatus |= CIST_FIN; /* will run a finalizer */
+ status = luaD_pcall(L, dothecall, NULL, savestack(L, L->top - 2), 0);
+ L->ci->callstatus &= ~CIST_FIN; /* not running a finalizer anymore */
+ L->allowhook = oldah; /* restore hooks */
+ g->gcrunning = running; /* restore state */
+ if (status != LUA_OK && propagateerrors) { /* error while running __gc? */
+ if (status == LUA_ERRRUN) { /* is there an error object? */
+ const char *msg = (ttisstring(L->top - 1))
+ ? svalue(L->top - 1)
+ : "no message";
+ luaO_pushfstring(L, "error in __gc metamethod (%s)", msg);
+ status = LUA_ERRGCMM; /* error in __gc metamethod */
+ }
+ luaD_throw(L, status); /* re-throw error */
+ }
+ }
+}
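+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): how the
+** finalization machinery above looks from the outside. An object whose
+** metatable has a '__gc' field is separated when it becomes unreachable,
+** and its finalizer is then called through 'GCTM':
+**
+**   #include "lua.h"
+**   #include "lauxlib.h"
+**   #include "lualib.h"
+**
+**   int main (void) {
+**     lua_State *L = luaL_newstate();
+**     luaL_openlibs(L);
+**     luaL_dostring(L,
+**       "do\n"
+**       "  setmetatable({}, {__gc = function() print('finalized') end})\n"
+**       "end\n"
+**       "collectgarbage()   -- object is unreachable: prints 'finalized'\n");
+**     lua_close(L);
+**     return 0;
+**   }
+*/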
+
+
+/*
+** call a few (up to 'g->gcfinnum') finalizers
+*/
+static int runafewfinalizers (lua_State *L) {
+ global_State *g = G(L);
+ unsigned int i;
+ lua_assert(!g->tobefnz || g->gcfinnum > 0);
+ for (i = 0; g->tobefnz && i < g->gcfinnum; i++)
+ GCTM(L, 1); /* call one finalizer */
+ g->gcfinnum = (!g->tobefnz) ? 0 /* nothing more to finalize? */
+ : g->gcfinnum * 2; /* else call a few more next time */
+ return i;
+}
+
+
+/*
+** call all pending finalizers
+*/
+static void callallpendingfinalizers (lua_State *L) {
+ global_State *g = G(L);
+ while (g->tobefnz)
+ GCTM(L, 0);
+}
+
+
+/*
+** find the last 'next' field in list 'p' (to add elements at its end)
+*/
+static GCObject **findlast (GCObject **p) {
+ while (*p != NULL)
+ p = &(*p)->next;
+ return p;
+}
+
+
+/*
+** move all unreachable objects (or 'all' objects) that need
+** finalization from list 'finobj' to list 'tobefnz' (to be finalized)
+*/
+static void separatetobefnz (global_State *g, int all) {
+ GCObject *curr;
+ GCObject **p = &g->finobj;
+ GCObject **lastnext = findlast(&g->tobefnz);
+ while ((curr = *p) != NULL) { /* traverse all finalizable objects */
+ lua_assert(tofinalize(curr));
+ if (!(iswhite(curr) || all)) /* not being collected? */
+ p = &curr->next; /* don't bother with it */
+ else {
+ *p = curr->next; /* remove 'curr' from 'finobj' list */
+ curr->next = *lastnext; /* link at the end of 'tobefnz' list */
+ *lastnext = curr;
+ lastnext = &curr->next;
+ }
+ }
+}
+
+
+/*
+** if object 'o' has a finalizer, remove it from 'allgc' list (must
+** search the list to find it) and link it in 'finobj' list.
+*/
+void luaC_checkfinalizer (lua_State *L, GCObject *o, Table *mt) {
+ global_State *g = G(L);
+ if (tofinalize(o) || /* obj. is already marked... */
+ gfasttm(g, mt, TM_GC) == NULL) /* or has no finalizer? */
+ return; /* nothing to be done */
+ else { /* move 'o' to 'finobj' list */
+ GCObject **p;
+ if (issweepphase(g)) {
+ makewhite(g, o); /* "sweep" object 'o' */
+ if (g->sweepgc == &o->next) /* should not remove 'sweepgc' object */
+ g->sweepgc = sweeptolive(L, g->sweepgc); /* change 'sweepgc' */
+ }
+ /* search for pointer pointing to 'o' */
+ for (p = &g->allgc; *p != o; p = &(*p)->next) { /* empty */ }
+ *p = o->next; /* remove 'o' from 'allgc' list */
+ o->next = g->finobj; /* link it in 'finobj' list */
+ g->finobj = o;
+ l_setbit(o->marked, FINALIZEDBIT); /* mark it as such */
+ }
+}
+
+/* }====================================================== */
+
+
+
+/*
+** {======================================================
+** GC control
+** =======================================================
+*/
+
+
+/*
+** Set a reasonable "time" to wait before starting a new GC cycle; the
+** cycle will start when memory use hits the threshold. (Division by
+** 'estimate' should be OK: it cannot be zero, because Lua cannot even
+** start with less than PAUSEADJ bytes.)
+*/
+static void setpause (global_State *g) {
+ l_mem threshold, debt;
+ l_mem estimate = g->GCestimate / PAUSEADJ; /* adjust 'estimate' */
+ lua_assert(estimate > 0);
+ threshold = (g->gcpause < MAX_LMEM / estimate) /* overflow? */
+ ? estimate * g->gcpause /* no overflow */
+ : MAX_LMEM; /* overflow; truncate to maximum */
+ debt = gettotalbytes(g) - threshold;
+ luaE_setdebt(g, debt);
+}
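+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): 'gcpause'
+** is exposed through the public API and through collectgarbage(). It is
+** a percentage of the current estimate; the default of 200 makes the next
+** cycle start when memory use reaches roughly twice the estimate left by
+** the previous cycle:
+**
+**   lua_gc(L, LUA_GCSETPAUSE, 150);   more eager: next cycle at ~1.5x
+**   lua_gc(L, LUA_GCSETPAUSE, 300);   lazier: next cycle at ~3x
+**
+** or, from Lua, collectgarbage('setpause', 150).
+*/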
+
+
+/*
+** Enter first sweep phase.
+** The call to 'sweeplist' tries to make the pointer point to an object
+** inside the list (instead of to the header), so that the real sweep does
+** not need to skip objects created between "now" and the start of the
+** real sweep.
+*/
+static void entersweep (lua_State *L) {
+ global_State *g = G(L);
+ g->gcstate = GCSswpallgc;
+ lua_assert(g->sweepgc == NULL);
+ g->sweepgc = sweeplist(L, &g->allgc, 1);
+}
+
+
+void luaC_freeallobjects (lua_State *L) {
+ global_State *g = G(L);
+ separatetobefnz(g, 1); /* separate all objects with finalizers */
+ lua_assert(g->finobj == NULL);
+ callallpendingfinalizers(L);
+ lua_assert(g->tobefnz == NULL);
+ g->currentwhite = WHITEBITS; /* this "white" makes all objects look dead */
+ g->gckind = KGC_NORMAL;
+ sweepwholelist(L, &g->finobj);
+ sweepwholelist(L, &g->allgc);
+ sweepwholelist(L, &g->fixedgc); /* collect fixed objects */
+ lua_assert(g->strt.nuse == 0);
+}
+
+
+static l_mem atomic (lua_State *L) {
+ global_State *g = G(L);
+ l_mem work;
+ GCObject *origweak, *origall;
+ GCObject *grayagain = g->grayagain; /* save original list */
+ lua_assert(g->ephemeron == NULL && g->weak == NULL);
+ lua_assert(!iswhite(g->mainthread));
+ g->gcstate = GCSinsideatomic;
+ g->GCmemtrav = 0; /* start counting work */
+ markobject(g, L); /* mark running thread */
+ /* registry and global metatables may be changed by API */
+ markvalue(g, &g->l_registry);
+ markmt(g); /* mark global metatables */
+ /* remark occasional upvalues of (maybe) dead threads */
+ remarkupvals(g);
+ propagateall(g); /* propagate changes */
+ work = g->GCmemtrav; /* stop counting (do not recount 'grayagain') */
+ g->gray = grayagain;
+ propagateall(g); /* traverse 'grayagain' list */
+ g->GCmemtrav = 0; /* restart counting */
+ convergeephemerons(g);
+ /* at this point, all strongly accessible objects are marked. */
+ /* Clear values from weak tables, before checking finalizers */
+ clearvalues(g, g->weak, NULL);
+ clearvalues(g, g->allweak, NULL);
+ origweak = g->weak; origall = g->allweak;
+ work += g->GCmemtrav; /* stop counting (objects being finalized) */
+ separatetobefnz(g, 0); /* separate objects to be finalized */
+ g->gcfinnum = 1; /* there may be objects to be finalized */
+ markbeingfnz(g); /* mark objects that will be finalized */
+ propagateall(g); /* remark, to propagate 'resurrection' */
+ g->GCmemtrav = 0; /* restart counting */
+ convergeephemerons(g);
+ /* at this point, all resurrected objects are marked. */
+ /* remove dead objects from weak tables */
+ clearkeys(g, g->ephemeron, NULL); /* clear keys from all ephemeron tables */
+ clearkeys(g, g->allweak, NULL); /* clear keys from all 'allweak' tables */
+ /* clear values from resurrected weak tables */
+ clearvalues(g, g->weak, origweak);
+ clearvalues(g, g->allweak, origall);
+ luaS_clearcache(g);
+ g->currentwhite = cast_byte(otherwhite(g)); /* flip current white */
+ work += g->GCmemtrav; /* complete counting */
+ return work; /* estimate of memory marked by 'atomic' */
+}
+
+
+static lu_mem sweepstep (lua_State *L, global_State *g,
+ int nextstate, GCObject **nextlist) {
+ if (g->sweepgc) {
+ l_mem olddebt = g->GCdebt;
+ g->sweepgc = sweeplist(L, g->sweepgc, GCSWEEPMAX);
+ g->GCestimate += g->GCdebt - olddebt; /* update estimate */
+ if (g->sweepgc) /* is there still something to sweep? */
+ return (GCSWEEPMAX * GCSWEEPCOST);
+ }
+ /* else enter next state */
+ g->gcstate = nextstate;
+ g->sweepgc = nextlist;
+ return 0;
+}
+
+
+static lu_mem singlestep (lua_State *L) {
+ global_State *g = G(L);
+ switch (g->gcstate) {
+ case GCSpause: {
+ g->GCmemtrav = g->strt.size * sizeof(GCObject*);
+ restartcollection(g);
+ g->gcstate = GCSpropagate;
+ return g->GCmemtrav;
+ }
+ case GCSpropagate: {
+ g->GCmemtrav = 0;
+ lua_assert(g->gray);
+ propagatemark(g);
+ if (g->gray == NULL) /* no more gray objects? */
+ g->gcstate = GCSatomic; /* finish propagate phase */
+ return g->GCmemtrav; /* memory traversed in this step */
+ }
+ case GCSatomic: {
+ lu_mem work;
+ propagateall(g); /* make sure gray list is empty */
+ work = atomic(L); /* work is what was traversed by 'atomic' */
+ entersweep(L);
+ g->GCestimate = gettotalbytes(g); /* first estimate */;
+ return work;
+ }
+ case GCSswpallgc: { /* sweep "regular" objects */
+ return sweepstep(L, g, GCSswpfinobj, &g->finobj);
+ }
+ case GCSswpfinobj: { /* sweep objects with finalizers */
+ return sweepstep(L, g, GCSswptobefnz, &g->tobefnz);
+ }
+ case GCSswptobefnz: { /* sweep objects to be finalized */
+ return sweepstep(L, g, GCSswpend, NULL);
+ }
+ case GCSswpend: { /* finish sweeps */
+ makewhite(g, g->mainthread); /* sweep main thread */
+ checkSizes(L, g);
+ g->gcstate = GCScallfin;
+ return 0;
+ }
+ case GCScallfin: { /* call remaining finalizers */
+ if (g->tobefnz && g->gckind != KGC_EMERGENCY) {
+ int n = runafewfinalizers(L);
+ return (n * GCFINALIZECOST);
+ }
+ else { /* emergency mode or no more finalizers */
+ g->gcstate = GCSpause; /* finish collection */
+ return 0;
+ }
+ }
+ default: lua_assert(0); return 0;
+ }
+}
+
+
+/*
+** advances the garbage collector until it reaches a state allowed
+** by 'statesmask'
+*/
+void luaC_runtilstate (lua_State *L, int statesmask) {
+ global_State *g = G(L);
+ while (!testbit(statesmask, g->gcstate))
+ singlestep(L);
+}
+
+
+/*
+** get GC debt and convert it from Kb to 'work units' (avoid zero debt
+** and overflows)
+*/
+static l_mem getdebt (global_State *g) {
+ l_mem debt = g->GCdebt;
+ int stepmul = g->gcstepmul;
+ if (debt <= 0) return 0; /* minimal debt */
+ else {
+ debt = (debt / STEPMULADJ) + 1;
+ debt = (debt < MAX_LMEM / stepmul) ? debt * stepmul : MAX_LMEM;
+ return debt;
+ }
+}
+
+/*
+** performs a basic GC step when collector is running
+*/
+void luaC_step (lua_State *L) {
+ global_State *g = G(L);
+ l_mem debt = getdebt(g); /* GC deficit (to be paid now) */
+ if (!g->gcrunning) { /* not running? */
+ luaE_setdebt(g, -GCSTEPSIZE * 10); /* avoid being called too often */
+ return;
+ }
+ do { /* repeat until pause or enough "credit" (negative debt) */
+ lu_mem work = singlestep(L); /* perform one single step */
+ debt -= work;
+ } while (debt > -GCSTEPSIZE && g->gcstate != GCSpause);
+ if (g->gcstate == GCSpause)
+ setpause(g); /* pause until next cycle */
+ else {
+ debt = (debt / g->gcstepmul) * STEPMULADJ; /* convert 'work units' to Kb */
+ luaE_setdebt(g, debt);
+ runafewfinalizers(L);
+ }
+}
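+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): the
+** incremental collection above can be driven and tuned from a host
+** through the public API; for LUA_GCSTEP, 'data' is interpreted as an
+** amount in KB that is added to the debt before stepping:
+**
+**   lua_gc(L, LUA_GCSETSTEPMUL, 400);    collector runs ~4x the allocation speed
+**   lua_gc(L, LUA_GCSTEP, 8);            force steps worth about 8 KB of debt
+**   int kb = lua_gc(L, LUA_GCCOUNT, 0);  memory in use, in KB
+**
+** The same controls are available from Lua as collectgarbage('setstepmul',
+** 400), collectgarbage('step', 8), and collectgarbage('count').
+*/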
+
+
+/*
+** Performs a full GC cycle; if 'isemergency', set a flag to avoid
+** some operations which could change the interpreter state in some
+** unexpected ways (running finalizers and shrinking some structures).
+** Before running the collection, check 'keepinvariant'; if it is true,
+** there may be some objects marked as black, so the collector has
+** to sweep all objects to turn them back to white (as white has not
+** changed, nothing will be collected).
+*/
+void luaC_fullgc (lua_State *L, int isemergency) {
+ global_State *g = G(L);
+ lua_assert(g->gckind == KGC_NORMAL);
+ if (isemergency) g->gckind = KGC_EMERGENCY; /* set flag */
+ if (keepinvariant(g)) { /* black objects? */
+ entersweep(L); /* sweep everything to turn them back to white */
+ }
+ /* finish any pending sweep phase to start a new cycle */
+ luaC_runtilstate(L, bitmask(GCSpause));
+ luaC_runtilstate(L, ~bitmask(GCSpause)); /* start new collection */
+ luaC_runtilstate(L, bitmask(GCScallfin)); /* run up to finalizers */
+ /* estimate must be correct after a full GC cycle */
+ lua_assert(g->GCestimate == gettotalbytes(g));
+ luaC_runtilstate(L, bitmask(GCSpause)); /* finish collection */
+ g->gckind = KGC_NORMAL;
+ setpause(g);
+}
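+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): this is the
+** function behind a full collection requested through the API or from Lua:
+**
+**   lua_gc(L, LUA_GCCOLLECT, 0);   full cycle (non-emergency)
+**
+** or, from Lua, collectgarbage('collect'). Emergency collections are only
+** triggered internally, when an allocation fails.
+*/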
+
+/* }====================================================== */
+
+
diff --git a/lua/src/lgc.h b/lua/src/lgc.h
new file mode 100644
index 000000000..aed3e18a5
--- /dev/null
+++ b/lua/src/lgc.h
@@ -0,0 +1,147 @@
+/*
+** $Id: lgc.h,v 2.91 2015/12/21 13:02:14 roberto Exp $
+** Garbage Collector
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lgc_h
+#define lgc_h
+
+
+#include "lobject.h"
+#include "lstate.h"
+
+/*
+** Collectable objects may have one of three colors: white, which
+** means the object is not marked; gray, which means the
+** object is marked, but its references may not be marked; and
+** black, which means that the object and all its references are marked.
+** The main invariant of the garbage collector, while marking objects,
+** is that a black object can never point to a white one. Moreover,
+** any gray object must be in a "gray list" (gray, grayagain, weak,
+** allweak, ephemeron) so that it can be visited again before finishing
+** the collection cycle. These lists have no meaning when the invariant
+** is not being enforced (e.g., sweep phase).
+*/
+
+
+
+/* how much to allocate before next GC step */
+#if !defined(GCSTEPSIZE)
+/* ~100 small strings */
+#define GCSTEPSIZE (cast_int(100 * sizeof(TString)))
+#endif
+
+
+/*
+** Possible states of the Garbage Collector
+*/
+#define GCSpropagate 0
+#define GCSatomic 1
+#define GCSswpallgc 2
+#define GCSswpfinobj 3
+#define GCSswptobefnz 4
+#define GCSswpend 5
+#define GCScallfin 6
+#define GCSpause 7
+
+
+#define issweepphase(g) \
+ (GCSswpallgc <= (g)->gcstate && (g)->gcstate <= GCSswpend)
+
+
+/*
+** macro to tell when the main invariant (black objects cannot point to
+** white ones) must be kept. During a collection, the sweep phase may
+** break the invariant, as objects already swept (turned white) can still
+** be pointed to by black objects that have not been swept yet. The
+** invariant is restored when the sweep ends and all objects are white
+** again.
+*/
+
+#define keepinvariant(g) ((g)->gcstate <= GCSatomic)
+
+
+/*
+** some useful bit tricks
+*/
+#define resetbits(x,m) ((x) &= cast(lu_byte, ~(m)))
+#define setbits(x,m) ((x) |= (m))
+#define testbits(x,m) ((x) & (m))
+#define bitmask(b) (1<<(b))
+#define bit2mask(b1,b2) (bitmask(b1) | bitmask(b2))
+#define l_setbit(x,b) setbits(x, bitmask(b))
+#define resetbit(x,b) resetbits(x, bitmask(b))
+#define testbit(x,b) testbits(x, bitmask(b))
+
+
+/* Layout for bit use in 'marked' field: */
+#define WHITE0BIT 0 /* object is white (type 0) */
+#define WHITE1BIT 1 /* object is white (type 1) */
+#define BLACKBIT 2 /* object is black */
+#define FINALIZEDBIT 3 /* object has been marked for finalization */
+/* bit 7 is currently used by tests (luaL_checkmemory) */
+
+#define WHITEBITS bit2mask(WHITE0BIT, WHITE1BIT)
+
+
+#define iswhite(x) testbits((x)->marked, WHITEBITS)
+#define isblack(x) testbit((x)->marked, BLACKBIT)
+#define isgray(x) /* neither white nor black */ \
+ (!testbits((x)->marked, WHITEBITS | bitmask(BLACKBIT)))
+
+#define tofinalize(x) testbit((x)->marked, FINALIZEDBIT)
+
+#define otherwhite(g) ((g)->currentwhite ^ WHITEBITS)
+#define isdeadm(ow,m) (!(((m) ^ WHITEBITS) & (ow)))
+#define isdead(g,v) isdeadm(otherwhite(g), (v)->marked)
+
+#define changewhite(x) ((x)->marked ^= WHITEBITS)
+#define gray2black(x) l_setbit((x)->marked, BLACKBIT)
+
+#define luaC_white(g) cast(lu_byte, (g)->currentwhite & WHITEBITS)
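+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): a tiny
+** stand-alone program (its own file, no Lua headers) that reproduces the
+** macros above to show the two-white trick: objects carrying the "other"
+** white (not marked in the last cycle) are the dead ones, while objects
+** created with the current white survive the ongoing cycle:
+**
+**   #include <stdio.h>
+**   #define bitmask(b)       (1<<(b))
+**   #define bit2mask(b1,b2)  (bitmask(b1) | bitmask(b2))
+**   #define WHITE0BIT 0
+**   #define WHITE1BIT 1
+**   #define WHITEBITS bit2mask(WHITE0BIT, WHITE1BIT)
+**   #define otherwhite(cw)   ((cw) ^ WHITEBITS)
+**   #define isdeadm(ow,m)    (!(((m) ^ WHITEBITS) & (ow)))
+**
+**   int main (void) {
+**     int currentwhite = bitmask(WHITE0BIT);
+**     int freshobj = bitmask(WHITE0BIT);
+**     int oldobj = bitmask(WHITE1BIT);
+**     printf("%d\n", isdeadm(otherwhite(currentwhite), freshobj));
+**     printf("%d\n", isdeadm(otherwhite(currentwhite), oldobj));
+**     return 0;
+**   }
+**
+** With WHITE0 as the current white, the program prints 0 (a freshly
+** created object is not dead) and then 1 (an object still carrying the
+** previous cycle's white is dead).
+*/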
+
+
+/*
+** Does one step of collection when debt becomes positive. 'pre'/'pos'
+** allow some adjustments to be done only when needed. The macro
+** 'condchangemem' is used only for heavy tests (forcing a full
+** GC cycle on every opportunity).
+*/
+#define luaC_condGC(L,pre,pos) \
+ { if (G(L)->GCdebt > 0) { pre; luaC_step(L); pos;}; \
+ condchangemem(L,pre,pos); }
+
+/* more often than not, 'pre'/'pos' are empty */
+#define luaC_checkGC(L) luaC_condGC(L,(void)0,(void)0)
+
+
+#define luaC_barrier(L,p,v) ( \
+ (iscollectable(v) && isblack(p) && iswhite(gcvalue(v))) ? \
+ luaC_barrier_(L,obj2gco(p),gcvalue(v)) : cast_void(0))
+
+#define luaC_barrierback(L,p,v) ( \
+ (iscollectable(v) && isblack(p) && iswhite(gcvalue(v))) ? \
+ luaC_barrierback_(L,p) : cast_void(0))
+
+#define luaC_objbarrier(L,p,o) ( \
+ (isblack(p) && iswhite(o)) ? \
+ luaC_barrier_(L,obj2gco(p),obj2gco(o)) : cast_void(0))
+
+#define luaC_upvalbarrier(L,uv) ( \
+ (iscollectable((uv)->v) && !upisopen(uv)) ? \
+ luaC_upvalbarrier_(L,uv) : cast_void(0))
+
+LUAI_FUNC void luaC_fix (lua_State *L, GCObject *o);
+LUAI_FUNC void luaC_freeallobjects (lua_State *L);
+LUAI_FUNC void luaC_step (lua_State *L);
+LUAI_FUNC void luaC_runtilstate (lua_State *L, int statesmask);
+LUAI_FUNC void luaC_fullgc (lua_State *L, int isemergency);
+LUAI_FUNC GCObject *luaC_newobj (lua_State *L, int tt, size_t sz);
+LUAI_FUNC void luaC_barrier_ (lua_State *L, GCObject *o, GCObject *v);
+LUAI_FUNC void luaC_barrierback_ (lua_State *L, Table *o);
+LUAI_FUNC void luaC_upvalbarrier_ (lua_State *L, UpVal *uv);
+LUAI_FUNC void luaC_checkfinalizer (lua_State *L, GCObject *o, Table *mt);
+LUAI_FUNC void luaC_upvdeccount (lua_State *L, UpVal *uv);
+
+
+#endif
diff --git a/lua/src/linit.c b/lua/src/linit.c
new file mode 100644
index 000000000..afcaf98b2
--- /dev/null
+++ b/lua/src/linit.c
@@ -0,0 +1,68 @@
+/*
+** $Id: linit.c,v 1.39 2016/12/04 20:17:24 roberto Exp $
+** Initialization of libraries for lua.c and other clients
+** See Copyright Notice in lua.h
+*/
+
+
+#define linit_c
+#define LUA_LIB
+
+/*
+** If you embed Lua in your program and need to open the standard
+** libraries, call luaL_openlibs in your program. If you need a
+** different set of libraries, copy this file to your project and edit
+** it to suit your needs.
+**
+** You can also *preload* libraries, so that a later 'require' can
+** open the library, which is already linked to the application.
+** For that, use the following code:
+**
+** luaL_getsubtable(L, LUA_REGISTRYINDEX, LUA_PRELOAD_TABLE);
+** lua_pushcfunction(L, luaopen_modname);
+** lua_setfield(L, -2, modname);
+** lua_pop(L, 1); // remove PRELOAD table
+*/
+
+#include "lprefix.h"
+
+
+#include <stddef.h>
+
+#include "lua.h"
+
+#include "lualib.h"
+#include "lauxlib.h"
+
+
+/*
+** these libs are loaded by lua.c and are readily available to any Lua
+** program
+*/
+static const luaL_Reg loadedlibs[] = {
+ {"_G", luaopen_base},
+ {LUA_LOADLIBNAME, luaopen_package},
+ {LUA_COLIBNAME, luaopen_coroutine},
+ {LUA_TABLIBNAME, luaopen_table},
+ {LUA_IOLIBNAME, luaopen_io},
+ {LUA_OSLIBNAME, luaopen_os},
+ {LUA_STRLIBNAME, luaopen_string},
+ {LUA_MATHLIBNAME, luaopen_math},
+ {LUA_UTF8LIBNAME, luaopen_utf8},
+ {LUA_DBLIBNAME, luaopen_debug},
+#if defined(LUA_COMPAT_BITLIB)
+ {LUA_BITLIBNAME, luaopen_bit32},
+#endif
+ {NULL, NULL}
+};
+
+
+LUALIB_API void luaL_openlibs (lua_State *L) {
+ const luaL_Reg *lib;
+ /* "require" functions from 'loadedlibs' and set results to global table */
+ for (lib = loadedlibs; lib->func; lib++) {
+ luaL_requiref(L, lib->name, lib->func, 1);
+ lua_pop(L, 1); /* remove lib */
+ }
+}
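+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): a minimal
+** host that opens the standard libraries and preloads one extra C module
+** as described in the comment at the top of this file ('luaopen_mymod'
+** and the module name "mymod" are placeholders for the host's own code):
+**
+**   #include "lua.h"
+**   #include "lauxlib.h"
+**   #include "lualib.h"
+**
+**   static int luaopen_mymod (lua_State *L) {
+**     lua_newtable(L);
+**     return 1;
+**   }
+**
+**   int main (void) {
+**     lua_State *L = luaL_newstate();
+**     luaL_openlibs(L);
+**     luaL_getsubtable(L, LUA_REGISTRYINDEX, LUA_PRELOAD_TABLE);
+**     lua_pushcfunction(L, luaopen_mymod);
+**     lua_setfield(L, -2, "mymod");
+**     lua_pop(L, 1);
+**     luaL_dostring(L, "local m = require('mymod'); print(type(m))");
+**     lua_close(L);
+**     return 0;
+**   }
+*/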
+
diff --git a/lua/src/liolib.c b/lua/src/liolib.c
new file mode 100644
index 000000000..156840358
--- /dev/null
+++ b/lua/src/liolib.c
@@ -0,0 +1,771 @@
+/*
+** $Id: liolib.c,v 2.151 2016/12/20 18:37:00 roberto Exp $
+** Standard I/O (and system) library
+** See Copyright Notice in lua.h
+*/
+
+#define liolib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <ctype.h>
+#include <errno.h>
+#include <locale.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+
+
+/*
+** Change this macro to accept other modes for 'fopen' besides
+** the standard ones.
+*/
+#if !defined(l_checkmode)
+
+/* accepted extensions to 'mode' in 'fopen' */
+#if !defined(L_MODEEXT)
+#define L_MODEEXT "b"
+#endif
+
+/* Check whether 'mode' matches '[rwa]%+?[L_MODEEXT]*' */
+static int l_checkmode (const char *mode) {
+ return (*mode != '\0' && strchr("rwa", *(mode++)) != NULL &&
+ (*mode != '+' || (++mode, 1)) && /* skip if char is '+' */
+ (strspn(mode, L_MODEEXT) == strlen(mode))); /* check extensions */
+}
+
+#endif
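+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): with the
+** default L_MODEEXT ("b"), 'l_checkmode' accepts "r", "w" or "a",
+** optionally followed by '+', followed by zero or more characters from
+** L_MODEEXT (e.g. "r", "rb", "w+", "a+b"), and rejects anything else
+** (e.g. "rw", "x", ""). A stand-alone check that copies the function:
+**
+**   #include <stdio.h>
+**   #include <string.h>
+**   #define L_MODEEXT "b"
+**
+**   static int l_checkmode (const char *mode) {
+**     return (*mode != '\0' && strchr("rwa", *(mode++)) != NULL &&
+**             (*mode != '+' || (++mode, 1)) &&
+**             (strspn(mode, L_MODEEXT) == strlen(mode)));
+**   }
+**
+**   int main (void) {
+**     const char *m[] = {"r", "rb", "w+", "a+b", "rw", "x", ""};
+**     int i;
+**     for (i = 0; i < 7; i++)
+**       printf("%-4s -> %d\n", m[i], l_checkmode(m[i]));
+**     return 0;
+**   }
+*/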
+
+/*
+** {======================================================
+** l_popen spawns a new process connected to the current
+** one through the file streams.
+** =======================================================
+*/
+
+#if !defined(l_popen) /* { */
+
+#if defined(LUA_USE_POSIX) /* { */
+
+#define l_popen(L,c,m) (fflush(NULL), popen(c,m))
+#define l_pclose(L,file) (pclose(file))
+
+#elif defined(LUA_USE_WINDOWS) /* }{ */
+
+#define l_popen(L,c,m) (_popen(c,m))
+#define l_pclose(L,file) (_pclose(file))
+
+#else /* }{ */
+
+/* ISO C definitions */
+#define l_popen(L,c,m) \
+ ((void)((void)c, m), \
+ luaL_error(L, "'popen' not supported"), \
+ (FILE*)0)
+#define l_pclose(L,file) ((void)L, (void)file, -1)
+
+#endif /* } */
+
+#endif /* } */
+
+/* }====================================================== */
+
+
+#if !defined(l_getc) /* { */
+
+#if defined(LUA_USE_POSIX)
+#define l_getc(f) getc_unlocked(f)
+#define l_lockfile(f) flockfile(f)
+#define l_unlockfile(f) funlockfile(f)
+#else
+#define l_getc(f) getc(f)
+#define l_lockfile(f) ((void)0)
+#define l_unlockfile(f) ((void)0)
+#endif
+
+#endif /* } */
+
+
+/*
+** {======================================================
+** l_fseek: configuration for longer offsets
+** =======================================================
+*/
+
+#if !defined(l_fseek) /* { */
+
+#if defined(LUA_USE_POSIX) /* { */
+
+#include <sys/types.h>
+
+#define l_fseek(f,o,w) fseeko(f,o,w)
+#define l_ftell(f) ftello(f)
+#define l_seeknum off_t
+
+#elif defined(LUA_USE_WINDOWS) && !defined(_CRTIMP_TYPEINFO) \
+ && defined(_MSC_VER) && (_MSC_VER >= 1400) /* }{ */
+
+/* Windows (but not DDK) and Visual C++ 2005 or higher */
+#define l_fseek(f,o,w) _fseeki64(f,o,w)
+#define l_ftell(f) _ftelli64(f)
+#define l_seeknum __int64
+
+#else /* }{ */
+
+/* ISO C definitions */
+#define l_fseek(f,o,w) fseek(f,o,w)
+#define l_ftell(f) ftell(f)
+#define l_seeknum long
+
+#endif /* } */
+
+#endif /* } */
+
+/* }====================================================== */
+
+
+#define IO_PREFIX "_IO_"
+#define IOPREF_LEN (sizeof(IO_PREFIX)/sizeof(char) - 1)
+#define IO_INPUT (IO_PREFIX "input")
+#define IO_OUTPUT (IO_PREFIX "output")
+
+
+typedef luaL_Stream LStream;
+
+
+#define tolstream(L) ((LStream *)luaL_checkudata(L, 1, LUA_FILEHANDLE))
+
+#define isclosed(p) ((p)->closef == NULL)
+
+
+static int io_type (lua_State *L) {
+ LStream *p;
+ luaL_checkany(L, 1);
+ p = (LStream *)luaL_testudata(L, 1, LUA_FILEHANDLE);
+ if (p == NULL)
+ lua_pushnil(L); /* not a file */
+ else if (isclosed(p))
+ lua_pushliteral(L, "closed file");
+ else
+ lua_pushliteral(L, "file");
+ return 1;
+}
+
+
+static int f_tostring (lua_State *L) {
+ LStream *p = tolstream(L);
+ if (isclosed(p))
+ lua_pushliteral(L, "file (closed)");
+ else
+ lua_pushfstring(L, "file (%p)", p->f);
+ return 1;
+}
+
+
+static FILE *tofile (lua_State *L) {
+ LStream *p = tolstream(L);
+ if (isclosed(p))
+ luaL_error(L, "attempt to use a closed file");
+ lua_assert(p->f);
+ return p->f;
+}
+
+
+/*
+** When creating file handles, always create a 'closed' file handle
+** before opening the actual file; then, if there is a memory error, the
+** handle is left in a consistent state.
+*/
+static LStream *newprefile (lua_State *L) {
+ LStream *p = (LStream *)lua_newuserdata(L, sizeof(LStream));
+ p->closef = NULL; /* mark file handle as 'closed' */
+ luaL_setmetatable(L, LUA_FILEHANDLE);
+ return p;
+}
+
+
+/*
+** Calls the 'close' function from a file handle. The 'volatile' avoids
+** a bug in some versions of the Clang compiler (e.g., clang 3.0 for
+** 32 bits).
+*/
+static int aux_close (lua_State *L) {
+ LStream *p = tolstream(L);
+ volatile lua_CFunction cf = p->closef;
+ p->closef = NULL; /* mark stream as closed */
+ return (*cf)(L); /* close it */
+}
+
+
+static int io_close (lua_State *L) {
+ if (lua_isnone(L, 1)) /* no argument? */
+ lua_getfield(L, LUA_REGISTRYINDEX, IO_OUTPUT); /* use standard output */
+ tofile(L); /* make sure argument is an open stream */
+ return aux_close(L);
+}
+
+
+static int f_gc (lua_State *L) {
+ LStream *p = tolstream(L);
+ if (!isclosed(p) && p->f != NULL)
+ aux_close(L); /* ignore closed and incompletely open files */
+ return 0;
+}
+
+
+/*
+** function to close regular files
+*/
+static int io_fclose (lua_State *L) {
+ LStream *p = tolstream(L);
+ int res = fclose(p->f);
+ return luaL_fileresult(L, (res == 0), NULL);
+}
+
+
+static LStream *newfile (lua_State *L) {
+ LStream *p = newprefile(L);
+ p->f = NULL;
+ p->closef = &io_fclose;
+ return p;
+}
+
+
+static void opencheck (lua_State *L, const char *fname, const char *mode) {
+ LStream *p = newfile(L);
+ p->f = fopen(fname, mode);
+ if (p->f == NULL)
+ luaL_error(L, "cannot open file '%s' (%s)", fname, strerror(errno));
+}
+
+
+static int io_open (lua_State *L) {
+ const char *filename = luaL_checkstring(L, 1);
+ const char *mode = luaL_optstring(L, 2, "r");
+ LStream *p = newfile(L);
+ const char *md = mode; /* to traverse/check mode */
+ luaL_argcheck(L, l_checkmode(md), 2, "invalid mode");
+ p->f = fopen(filename, mode);
+ return (p->f == NULL) ? luaL_fileresult(L, 0, filename) : 1;
+}
+
+
+/*
+** function to close 'popen' files
+*/
+static int io_pclose (lua_State *L) {
+ LStream *p = tolstream(L);
+ return luaL_execresult(L, l_pclose(L, p->f));
+}
+
+
+static int io_popen (lua_State *L) {
+ const char *filename = luaL_checkstring(L, 1);
+ const char *mode = luaL_optstring(L, 2, "r");
+ LStream *p = newprefile(L);
+ p->f = l_popen(L, filename, mode);
+ p->closef = &io_pclose;
+ return (p->f == NULL) ? luaL_fileresult(L, 0, filename) : 1;
+}
+
+
+static int io_tmpfile (lua_State *L) {
+ LStream *p = newfile(L);
+ p->f = tmpfile();
+ return (p->f == NULL) ? luaL_fileresult(L, 0, NULL) : 1;
+}
+
+
+static FILE *getiofile (lua_State *L, const char *findex) {
+ LStream *p;
+ lua_getfield(L, LUA_REGISTRYINDEX, findex);
+ p = (LStream *)lua_touserdata(L, -1);
+ if (isclosed(p))
+ luaL_error(L, "standard %s file is closed", findex + IOPREF_LEN);
+ return p->f;
+}
+
+
+static int g_iofile (lua_State *L, const char *f, const char *mode) {
+ if (!lua_isnoneornil(L, 1)) {
+ const char *filename = lua_tostring(L, 1);
+ if (filename)
+ opencheck(L, filename, mode);
+ else {
+ tofile(L); /* check that it's a valid file handle */
+ lua_pushvalue(L, 1);
+ }
+ lua_setfield(L, LUA_REGISTRYINDEX, f);
+ }
+ /* return current value */
+ lua_getfield(L, LUA_REGISTRYINDEX, f);
+ return 1;
+}
+
+
+static int io_input (lua_State *L) {
+ return g_iofile(L, IO_INPUT, "r");
+}
+
+
+static int io_output (lua_State *L) {
+ return g_iofile(L, IO_OUTPUT, "w");
+}
+
+
+static int io_readline (lua_State *L);
+
+
+/*
+** maximum number of arguments to 'f:lines'/'io.lines' (it + 3 must fit
+** in the limit for upvalues of a closure)
+*/
+#define MAXARGLINE 250
+
+static void aux_lines (lua_State *L, int toclose) {
+ int n = lua_gettop(L) - 1; /* number of arguments to read */
+ luaL_argcheck(L, n <= MAXARGLINE, MAXARGLINE + 2, "too many arguments");
+ lua_pushinteger(L, n); /* number of arguments to read */
+ lua_pushboolean(L, toclose); /* close/not close file when finished */
+ lua_rotate(L, 2, 2); /* move 'n' and 'toclose' to their positions */
+ lua_pushcclosure(L, io_readline, 3 + n);
+}
+
+
+static int f_lines (lua_State *L) {
+ tofile(L); /* check that it's a valid file handle */
+ aux_lines(L, 0);
+ return 1;
+}
+
+
+static int io_lines (lua_State *L) {
+ int toclose;
+ if (lua_isnone(L, 1)) lua_pushnil(L); /* at least one argument */
+ if (lua_isnil(L, 1)) { /* no file name? */
+ lua_getfield(L, LUA_REGISTRYINDEX, IO_INPUT); /* get default input */
+ lua_replace(L, 1); /* put it at index 1 */
+ tofile(L); /* check that it's a valid file handle */
+ toclose = 0; /* do not close it after iteration */
+ }
+ else { /* open a new file */
+ const char *filename = luaL_checkstring(L, 1);
+ opencheck(L, filename, "r");
+ lua_replace(L, 1); /* put file at index 1 */
+ toclose = 1; /* close it after iteration */
+ }
+ aux_lines(L, toclose);
+ return 1;
+}
+
+
+/*
+** {======================================================
+** READ
+** =======================================================
+*/
+
+
+/* maximum length of a numeral */
+#if !defined (L_MAXLENNUM)
+#define L_MAXLENNUM 200
+#endif
+
+
+/* auxiliary structure used by 'read_number' */
+typedef struct {
+ FILE *f; /* file being read */
+ int c; /* current character (look ahead) */
+ int n; /* number of elements in buffer 'buff' */
+ char buff[L_MAXLENNUM + 1]; /* +1 for ending '\0' */
+} RN;
+
+
+/*
+** Add current char to buffer (if not out of space) and read next one
+*/
+static int nextc (RN *rn) {
+ if (rn->n >= L_MAXLENNUM) { /* buffer overflow? */
+ rn->buff[0] = '\0'; /* invalidate result */
+ return 0; /* fail */
+ }
+ else {
+ rn->buff[rn->n++] = rn->c; /* save current char */
+ rn->c = l_getc(rn->f); /* read next one */
+ return 1;
+ }
+}
+
+
+/*
+** Accept current char if it is in 'set' (of size 2)
+*/
+static int test2 (RN *rn, const char *set) {
+ if (rn->c == set[0] || rn->c == set[1])
+ return nextc(rn);
+ else return 0;
+}
+
+
+/*
+** Read a sequence of (hex)digits
+*/
+static int readdigits (RN *rn, int hex) {
+ int count = 0;
+ while ((hex ? isxdigit(rn->c) : isdigit(rn->c)) && nextc(rn))
+ count++;
+ return count;
+}
+
+
+/*
+** Read a number: first reads a valid prefix of a numeral into a buffer.
+** Then it calls 'lua_stringtonumber' to check whether the format is
+** correct and to convert it to a Lua number
+*/
+static int read_number (lua_State *L, FILE *f) {
+ RN rn;
+ int count = 0;
+ int hex = 0;
+ char decp[2];
+ rn.f = f; rn.n = 0;
+ decp[0] = lua_getlocaledecpoint(); /* get decimal point from locale */
+ decp[1] = '.'; /* always accept a dot */
+ l_lockfile(rn.f);
+ do { rn.c = l_getc(rn.f); } while (isspace(rn.c)); /* skip spaces */
+ test2(&rn, "-+"); /* optional sign */
+ if (test2(&rn, "00")) {
+ if (test2(&rn, "xX")) hex = 1; /* numeral is hexadecimal */
+ else count = 1; /* count initial '0' as a valid digit */
+ }
+ count += readdigits(&rn, hex); /* integral part */
+ if (test2(&rn, decp)) /* decimal point? */
+ count += readdigits(&rn, hex); /* fractional part */
+ if (count > 0 && test2(&rn, (hex ? "pP" : "eE"))) { /* exponent mark? */
+ test2(&rn, "-+"); /* exponent sign */
+ readdigits(&rn, 0); /* exponent digits */
+ }
+ ungetc(rn.c, rn.f); /* unread look-ahead char */
+ l_unlockfile(rn.f);
+ rn.buff[rn.n] = '\0'; /* finish string */
+ if (lua_stringtonumber(L, rn.buff)) /* is this a valid number? */
+ return 1; /* ok */
+ else { /* invalid format */
+ lua_pushnil(L); /* "result" to be removed */
+ return 0; /* read fails */
+ }
+}
+
+
+static int test_eof (lua_State *L, FILE *f) {
+ int c = getc(f);
+ ungetc(c, f); /* no-op when c == EOF */
+ lua_pushliteral(L, "");
+ return (c != EOF);
+}
+
+
+static int read_line (lua_State *L, FILE *f, int chop) {
+ luaL_Buffer b;
+ int c = '\0';
+ luaL_buffinit(L, &b);
+ while (c != EOF && c != '\n') { /* repeat until end of line */
+ char *buff = luaL_prepbuffer(&b); /* preallocate buffer */
+ int i = 0;
+ l_lockfile(f); /* no memory errors can happen inside the lock */
+ while (i < LUAL_BUFFERSIZE && (c = l_getc(f)) != EOF && c != '\n')
+ buff[i++] = c;
+ l_unlockfile(f);
+ luaL_addsize(&b, i);
+ }
+ if (!chop && c == '\n') /* want a newline and have one? */
+ luaL_addchar(&b, c); /* add ending newline to result */
+ luaL_pushresult(&b); /* close buffer */
+ /* return ok if read something (either a newline or something else) */
+ return (c == '\n' || lua_rawlen(L, -1) > 0);
+}
+
+
+static void read_all (lua_State *L, FILE *f) {
+ size_t nr;
+ luaL_Buffer b;
+ luaL_buffinit(L, &b);
+ do { /* read file in chunks of LUAL_BUFFERSIZE bytes */
+ char *p = luaL_prepbuffer(&b);
+ nr = fread(p, sizeof(char), LUAL_BUFFERSIZE, f);
+ luaL_addsize(&b, nr);
+ } while (nr == LUAL_BUFFERSIZE);
+ luaL_pushresult(&b); /* close buffer */
+}
+
+
+static int read_chars (lua_State *L, FILE *f, size_t n) {
+ size_t nr; /* number of chars actually read */
+ char *p;
+ luaL_Buffer b;
+ luaL_buffinit(L, &b);
+ p = luaL_prepbuffsize(&b, n); /* prepare buffer to read whole block */
+ nr = fread(p, sizeof(char), n, f); /* try to read 'n' chars */
+ luaL_addsize(&b, nr);
+ luaL_pushresult(&b); /* close buffer */
+ return (nr > 0); /* true iff read something */
+}
+
+
+static int g_read (lua_State *L, FILE *f, int first) {
+ int nargs = lua_gettop(L) - 1;
+ int success;
+ int n;
+ clearerr(f);
+ if (nargs == 0) { /* no arguments? */
+ success = read_line(L, f, 1);
+ n = first+1; /* to return 1 result */
+ }
+ else { /* ensure stack space for all results and for auxlib's buffer */
+ luaL_checkstack(L, nargs+LUA_MINSTACK, "too many arguments");
+ success = 1;
+ for (n = first; nargs-- && success; n++) {
+ if (lua_type(L, n) == LUA_TNUMBER) {
+ size_t l = (size_t)luaL_checkinteger(L, n);
+ success = (l == 0) ? test_eof(L, f) : read_chars(L, f, l);
+ }
+ else {
+ const char *p = luaL_checkstring(L, n);
+ if (*p == '*') p++; /* skip optional '*' (for compatibility) */
+ switch (*p) {
+ case 'n': /* number */
+ success = read_number(L, f);
+ break;
+ case 'l': /* line */
+ success = read_line(L, f, 1);
+ break;
+ case 'L': /* line with end-of-line */
+ success = read_line(L, f, 0);
+ break;
+ case 'a': /* file */
+ read_all(L, f); /* read entire file */
+ success = 1; /* always success */
+ break;
+ default:
+ return luaL_argerror(L, n, "invalid format");
+ }
+ }
+ }
+ }
+ if (ferror(f))
+ return luaL_fileresult(L, 0, NULL);
+ if (!success) {
+ lua_pop(L, 1); /* remove last result */
+ lua_pushnil(L); /* push nil instead */
+ }
+ return n - first;
+}
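+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): the read
+** formats dispatched above, as seen from Lua and driven from a host
+** program (os.tmpname() may need adjustment on some platforms; it is
+** only used to have a scratch file):
+**
+**   #include "lua.h"
+**   #include "lauxlib.h"
+**   #include "lualib.h"
+**
+**   int main (void) {
+**     lua_State *L = luaL_newstate();
+**     luaL_openlibs(L);
+**     luaL_dostring(L,
+**       "local name = os.tmpname()\n"
+**       "local f = assert(io.open(name, 'w'))\n"
+**       "f:write('42 first line\\nsecond line\\n')\n"
+**       "f:close()\n"
+**       "f = assert(io.open(name, 'r'))\n"
+**       "print(f:read('n'))   --> 42            ('n': read_number)\n"
+**       "print(f:read('l'))   --> ' first line' ('l': read_line, chopped)\n"
+**       "print(f:read('a'))   --> 'second line' ('a': read_all)\n"
+**       "f:close(); os.remove(name)\n");
+**     lua_close(L);
+**     return 0;
+**   }
+*/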
+
+
+static int io_read (lua_State *L) {
+ return g_read(L, getiofile(L, IO_INPUT), 1);
+}
+
+
+static int f_read (lua_State *L) {
+ return g_read(L, tofile(L), 2);
+}
+
+
+static int io_readline (lua_State *L) {
+ LStream *p = (LStream *)lua_touserdata(L, lua_upvalueindex(1));
+ int i;
+ int n = (int)lua_tointeger(L, lua_upvalueindex(2));
+ if (isclosed(p)) /* file is already closed? */
+ return luaL_error(L, "file is already closed");
+ lua_settop(L , 1);
+ luaL_checkstack(L, n, "too many arguments");
+ for (i = 1; i <= n; i++) /* push arguments to 'g_read' */
+ lua_pushvalue(L, lua_upvalueindex(3 + i));
+ n = g_read(L, p->f, 2); /* 'n' is number of results */
+ lua_assert(n > 0); /* should return at least a nil */
+ if (lua_toboolean(L, -n)) /* read at least one value? */
+ return n; /* return them */
+ else { /* first result is nil: EOF or error */
+ if (n > 1) { /* is there error information? */
+ /* 2nd result is error message */
+ return luaL_error(L, "%s", lua_tostring(L, -n + 1));
+ }
+ if (lua_toboolean(L, lua_upvalueindex(3))) { /* generator created file? */
+ lua_settop(L, 0);
+ lua_pushvalue(L, lua_upvalueindex(1));
+ aux_close(L); /* close it */
+ }
+ return 0;
+ }
+}
+
+/* }====================================================== */
+
+
+static int g_write (lua_State *L, FILE *f, int arg) {
+ int nargs = lua_gettop(L) - arg;
+ int status = 1;
+ for (; nargs--; arg++) {
+ if (lua_type(L, arg) == LUA_TNUMBER) {
+ /* optimization: could be done exactly as for strings */
+ int len = lua_isinteger(L, arg)
+ ? fprintf(f, LUA_INTEGER_FMT,
+ (LUAI_UACINT)lua_tointeger(L, arg))
+ : fprintf(f, LUA_NUMBER_FMT,
+ (LUAI_UACNUMBER)lua_tonumber(L, arg));
+ status = status && (len > 0);
+ }
+ else {
+ size_t l;
+ const char *s = luaL_checklstring(L, arg, &l);
+ status = status && (fwrite(s, sizeof(char), l, f) == l);
+ }
+ }
+ if (status) return 1; /* file handle already on stack top */
+ else return luaL_fileresult(L, status, NULL);
+}
+
+
+static int io_write (lua_State *L) {
+ return g_write(L, getiofile(L, IO_OUTPUT), 1);
+}
+
+
+static int f_write (lua_State *L) {
+ FILE *f = tofile(L);
+ lua_pushvalue(L, 1); /* push file at the stack top (to be returned) */
+ return g_write(L, f, 2);
+}
+
+
+static int f_seek (lua_State *L) {
+ static const int mode[] = {SEEK_SET, SEEK_CUR, SEEK_END};
+ static const char *const modenames[] = {"set", "cur", "end", NULL};
+ FILE *f = tofile(L);
+ int op = luaL_checkoption(L, 2, "cur", modenames);
+ lua_Integer p3 = luaL_optinteger(L, 3, 0);
+ l_seeknum offset = (l_seeknum)p3;
+ luaL_argcheck(L, (lua_Integer)offset == p3, 3,
+ "not an integer in proper range");
+ op = l_fseek(f, offset, mode[op]);
+ if (op)
+ return luaL_fileresult(L, 0, NULL); /* error */
+ else {
+ lua_pushinteger(L, (lua_Integer)l_ftell(f));
+ return 1;
+ }
+}
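+
+/*
+** Illustrative sketch (not part of the upstream Lua sources): a common use
+** of 'f:seek' as wrapped above is getting a file's size without reading it
+** ("lua.h" here is just a stand-in for any readable file in the current
+** directory):
+**
+**   #include "lua.h"
+**   #include "lauxlib.h"
+**   #include "lualib.h"
+**
+**   int main (void) {
+**     lua_State *L = luaL_newstate();
+**     luaL_openlibs(L);
+**     luaL_dostring(L,
+**       "local f = assert(io.open('lua.h', 'rb'))\n"
+**       "print(f:seek('end'))   -- size in bytes (offset from the end)\n"
+**       "f:seek('set')          -- back to the beginning (offset defaults to 0)\n"
+**       "f:close()\n");
+**     lua_close(L);
+**     return 0;
+**   }
+*/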
+
+
+static int f_setvbuf (lua_State *L) {
+ static const int mode[] = {_IONBF, _IOFBF, _IOLBF};
+ static const char *const modenames[] = {"no", "full", "line", NULL};
+ FILE *f = tofile(L);
+ int op = luaL_checkoption(L, 2, NULL, modenames);
+ lua_Integer sz = luaL_optinteger(L, 3, LUAL_BUFFERSIZE);
+ int res = setvbuf(f, NULL, mode[op], (size_t)sz);
+ return luaL_fileresult(L, res == 0, NULL);
+}
+
+
+
+static int io_flush (lua_State *L) {
+ return luaL_fileresult(L, fflush(getiofile(L, IO_OUTPUT)) == 0, NULL);
+}
+
+
+static int f_flush (lua_State *L) {
+ return luaL_fileresult(L, fflush(tofile(L)) == 0, NULL);
+}
+
+
+/*
+** functions for 'io' library
+*/
+static const luaL_Reg iolib[] = {
+ {"close", io_close},
+ {"flush", io_flush},
+ {"input", io_input},
+ {"lines", io_lines},
+ {"open", io_open},
+ {"output", io_output},
+ {"popen", io_popen},
+ {"read", io_read},
+ {"tmpfile", io_tmpfile},
+ {"type", io_type},
+ {"write", io_write},
+ {NULL, NULL}
+};
+
+
+/*
+** methods for file handles
+*/
+static const luaL_Reg flib[] = {
+ {"close", io_close},
+ {"flush", f_flush},
+ {"lines", f_lines},
+ {"read", f_read},
+ {"seek", f_seek},
+ {"setvbuf", f_setvbuf},
+ {"write", f_write},
+ {"__gc", f_gc},
+ {"__tostring", f_tostring},
+ {NULL, NULL}
+};
+
+
+static void createmeta (lua_State *L) {
+ luaL_newmetatable(L, LUA_FILEHANDLE); /* create metatable for file handles */
+ lua_pushvalue(L, -1); /* push metatable */
+ lua_setfield(L, -2, "__index"); /* metatable.__index = metatable */
+ luaL_setfuncs(L, flib, 0); /* add file methods to new metatable */
+ lua_pop(L, 1); /* pop new metatable */
+}
+
+
+/*
+** function to (not) close the standard files stdin, stdout, and stderr
+*/
+static int io_noclose (lua_State *L) {
+ LStream *p = tolstream(L);
+ p->closef = &io_noclose; /* keep file opened */
+ lua_pushnil(L);
+ lua_pushliteral(L, "cannot close standard file");
+ return 2;
+}
+
+
+static void createstdfile (lua_State *L, FILE *f, const char *k,
+ const char *fname) {
+ LStream *p = newprefile(L);
+ p->f = f;
+ p->closef = &io_noclose;
+ if (k != NULL) {
+ lua_pushvalue(L, -1);
+ lua_setfield(L, LUA_REGISTRYINDEX, k); /* add file to registry */
+ }
+ lua_setfield(L, -2, fname); /* add file to module */
+}
+
+
+LUAMOD_API int luaopen_io (lua_State *L) {
+ luaL_newlib(L, iolib); /* new module */
+ createmeta(L);
+ /* create (and set) default files */
+ createstdfile(L, stdin, IO_INPUT, "stdin");
+ createstdfile(L, stdout, IO_OUTPUT, "stdout");
+ createstdfile(L, stderr, NULL, "stderr");
+ return 1;
+}
+
diff --git a/lua/src/llex.c b/lua/src/llex.c
new file mode 100644
index 000000000..70328273f
--- /dev/null
+++ b/lua/src/llex.c
@@ -0,0 +1,565 @@
+/*
+** $Id: llex.c,v 2.96 2016/05/02 14:02:12 roberto Exp $
+** Lexical Analyzer
+** See Copyright Notice in lua.h
+*/
+
+#define llex_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <locale.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lctype.h"
+#include "ldebug.h"
+#include "ldo.h"
+#include "lgc.h"
+#include "llex.h"
+#include "lobject.h"
+#include "lparser.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "lzio.h"
+
+
+
+#define next(ls) (ls->current = zgetc(ls->z))
+
+
+
+#define currIsNewline(ls) (ls->current == '\n' || ls->current == '\r')
+
+
+/* ORDER RESERVED */
+static const char *const luaX_tokens [] = {
+ "and", "break", "do", "else", "elseif",
+ "end", "false", "for", "function", "goto", "if",
+ "in", "local", "nil", "not", "or", "repeat",
+ "return", "then", "true", "until", "while",
+ "//", "..", "...", "==", ">=", "<=", "~=",
+ "<<", ">>", "::", "<eof>",
+ "<number>", "<integer>", "<name>", "<string>"
+};
+
+
+#define save_and_next(ls) (save(ls, ls->current), next(ls))
+
+
+static l_noret lexerror (LexState *ls, const char *msg, int token);
+
+
+static void save (LexState *ls, int c) {
+ Mbuffer *b = ls->buff;
+ if (luaZ_bufflen(b) + 1 > luaZ_sizebuffer(b)) {
+ size_t newsize;
+ if (luaZ_sizebuffer(b) >= MAX_SIZE/2)
+ lexerror(ls, "lexical element too long", 0);
+ newsize = luaZ_sizebuffer(b) * 2;
+ luaZ_resizebuffer(ls->L, b, newsize);
+ }
+ b->buffer[luaZ_bufflen(b)++] = cast(char, c);
+}
+
+
+void luaX_init (lua_State *L) {
+ int i;
+ TString *e = luaS_newliteral(L, LUA_ENV); /* create env name */
+ luaC_fix(L, obj2gco(e)); /* never collect this name */
+ for (i=0; i<NUM_RESERVED; i++) {
+ TString *ts = luaS_new(L, luaX_tokens[i]);
+ luaC_fix(L, obj2gco(ts)); /* reserved words are never collected */
+ ts->extra = cast_byte(i+1); /* reserved word */
+ }
+}
+
+
+const char *luaX_token2str (LexState *ls, int token) {
+ if (token < FIRST_RESERVED) { /* single-byte symbols? */
+ lua_assert(token == cast_uchar(token));
+ return luaO_pushfstring(ls->L, "'%c'", token);
+ }
+ else {
+ const char *s = luaX_tokens[token - FIRST_RESERVED];
+ if (token < TK_EOS) /* fixed format (symbols and reserved words)? */
+ return luaO_pushfstring(ls->L, "'%s'", s);
+ else /* names, strings, and numerals */
+ return s;
+ }
+}
+
+
+static const char *txtToken (LexState *ls, int token) {
+ switch (token) {
+ case TK_NAME: case TK_STRING:
+ case TK_FLT: case TK_INT:
+ save(ls, '\0');
+ return luaO_pushfstring(ls->L, "'%s'", luaZ_buffer(ls->buff));
+ default:
+ return luaX_token2str(ls, token);
+ }
+}
+
+
+static l_noret lexerror (LexState *ls, const char *msg, int token) {
+ msg = luaG_addinfo(ls->L, msg, ls->source, ls->linenumber);
+ if (token)
+ luaO_pushfstring(ls->L, "%s near %s", msg, txtToken(ls, token));
+ luaD_throw(ls->L, LUA_ERRSYNTAX);
+}
+
+
+l_noret luaX_syntaxerror (LexState *ls, const char *msg) {
+ lexerror(ls, msg, ls->t.token);
+}
+
+
+/*
+** creates a new string and anchors it in scanner's table so that
+** it will not be collected until the end of the compilation
+** (by that time it should be anchored somewhere)
+*/
+TString *luaX_newstring (LexState *ls, const char *str, size_t l) {
+ lua_State *L = ls->L;
+ TValue *o; /* entry for 'str' */
+ TString *ts = luaS_newlstr(L, str, l); /* create new string */
+ setsvalue2s(L, L->top++, ts); /* temporarily anchor it in stack */
+ o = luaH_set(L, ls->h, L->top - 1);
+ if (ttisnil(o)) { /* not in use yet? */
+ /* boolean value does not need GC barrier;
+ table has no metatable, so it does not need to invalidate cache */
+ setbvalue(o, 1); /* t[string] = true */
+ luaC_checkGC(L);
+ }
+ else { /* string already present */
+ ts = tsvalue(keyfromval(o)); /* re-use value previously stored */
+ }
+ L->top--; /* remove string from stack */
+ return ts;
+}
+
+
+/*
+** increments the line number and skips the newline sequence (any of
+** \n, \r, \n\r, or \r\n)
+*/
+static void inclinenumber (LexState *ls) {
+ int old = ls->current;
+ lua_assert(currIsNewline(ls));
+ next(ls); /* skip '\n' or '\r' */
+ if (currIsNewline(ls) && ls->current != old)
+ next(ls); /* skip '\n\r' or '\r\n' */
+ if (++ls->linenumber >= MAX_INT)
+ lexerror(ls, "chunk has too many lines", 0);
+}
+
+
+void luaX_setinput (lua_State *L, LexState *ls, ZIO *z, TString *source,
+ int firstchar) {
+ ls->t.token = 0;
+ ls->L = L;
+ ls->current = firstchar;
+ ls->lookahead.token = TK_EOS; /* no look-ahead token */
+ ls->z = z;
+ ls->fs = NULL;
+ ls->linenumber = 1;
+ ls->lastline = 1;
+ ls->source = source;
+ ls->envn = luaS_newliteral(L, LUA_ENV); /* get env name */
+ luaZ_resizebuffer(ls->L, ls->buff, LUA_MINBUFFER); /* initialize buffer */
+}
+
+
+
+/*
+** =======================================================
+** LEXICAL ANALYZER
+** =======================================================
+*/
+
+
+static int check_next1 (LexState *ls, int c) {
+ if (ls->current == c) {
+ next(ls);
+ return 1;
+ }
+ else return 0;
+}
+
+
+/*
+** Check whether the current char is in set 'set' (with two chars); if
+** so, save it
+*/
+static int check_next2 (LexState *ls, const char *set) {
+ lua_assert(set[2] == '\0');
+ if (ls->current == set[0] || ls->current == set[1]) {
+ save_and_next(ls);
+ return 1;
+ }
+ else return 0;
+}
+
+
+/* LUA_NUMBER */
+/*
+** this function is quite liberal in what it accepts, as 'luaO_str2num'
+** will reject ill-formed numerals.
+*/
+static int read_numeral (LexState *ls, SemInfo *seminfo) {
+ TValue obj;
+ const char *expo = "Ee";
+ int first = ls->current;
+ lua_assert(lisdigit(ls->current));
+ save_and_next(ls);
+ if (first == '0' && check_next2(ls, "xX")) /* hexadecimal? */
+ expo = "Pp";
+ for (;;) {
+ if (check_next2(ls, expo)) /* exponent part? */
+ check_next2(ls, "-+"); /* optional exponent sign */
+ if (lisxdigit(ls->current))
+ save_and_next(ls);
+ else if (ls->current == '.')
+ save_and_next(ls);
+ else break;
+ }
+ save(ls, '\0');
+ if (luaO_str2num(luaZ_buffer(ls->buff), &obj) == 0) /* format error? */
+ lexerror(ls, "malformed number", TK_FLT);
+ if (ttisinteger(&obj)) {
+ seminfo->i = ivalue(&obj);
+ return TK_INT;
+ }
+ else {
+ lua_assert(ttisfloat(&obj));
+ seminfo->r = fltvalue(&obj);
+ return TK_FLT;
+ }
+}
+
+
+/*
+** skip a sequence '[=*[' or ']=*]'; if sequence is well formed, return
+** its number of '='s; otherwise, return a negative number (-1 iff there
+** are no '='s after initial bracket)
+*/
+static int skip_sep (LexState *ls) {
+ int count = 0;
+ int s = ls->current;
+ lua_assert(s == '[' || s == ']');
+ save_and_next(ls);
+ while (ls->current == '=') {
+ save_and_next(ls);
+ count++;
+ }
+ return (ls->current == s) ? count : (-count) - 1;
+}
+
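/*
** [editorial sketch -- not part of the Lua sources added by this patch]
** The return convention of 'skip_sep' above, mirrored on a plain C string
** for illustration: "[==[" yields 2, "[[" yields 0, "[=x" yields -2 and
** "[x" yields -1 (no '='s after the initial bracket).
*/
static int count_sep_sketch (const char *s) {
  int count = 0;
  int open = *s++;                       /* '[' or ']' */
  while (*s == '=') { s++; count++; }
  return (*s == open) ? count : (-count) - 1;
}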
+
+static void read_long_string (LexState *ls, SemInfo *seminfo, int sep) {
+ int line = ls->linenumber; /* initial line (for error message) */
+ save_and_next(ls); /* skip 2nd '[' */
+ if (currIsNewline(ls)) /* string starts with a newline? */
+ inclinenumber(ls); /* skip it */
+ for (;;) {
+ switch (ls->current) {
+ case EOZ: { /* error */
+ const char *what = (seminfo ? "string" : "comment");
+ const char *msg = luaO_pushfstring(ls->L,
+ "unfinished long %s (starting at line %d)", what, line);
+ lexerror(ls, msg, TK_EOS);
+ break; /* to avoid warnings */
+ }
+ case ']': {
+ if (skip_sep(ls) == sep) {
+ save_and_next(ls); /* skip 2nd ']' */
+ goto endloop;
+ }
+ break;
+ }
+ case '\n': case '\r': {
+ save(ls, '\n');
+ inclinenumber(ls);
+ if (!seminfo) luaZ_resetbuffer(ls->buff); /* avoid wasting space */
+ break;
+ }
+ default: {
+ if (seminfo) save_and_next(ls);
+ else next(ls);
+ }
+ }
+ } endloop:
+ if (seminfo)
+ seminfo->ts = luaX_newstring(ls, luaZ_buffer(ls->buff) + (2 + sep),
+ luaZ_bufflen(ls->buff) - 2*(2 + sep));
+}
+
+
+static void esccheck (LexState *ls, int c, const char *msg) {
+ if (!c) {
+ if (ls->current != EOZ)
+ save_and_next(ls); /* add current to buffer for error message */
+ lexerror(ls, msg, TK_STRING);
+ }
+}
+
+
+static int gethexa (LexState *ls) {
+ save_and_next(ls);
+ esccheck (ls, lisxdigit(ls->current), "hexadecimal digit expected");
+ return luaO_hexavalue(ls->current);
+}
+
+
+static int readhexaesc (LexState *ls) {
+ int r = gethexa(ls);
+ r = (r << 4) + gethexa(ls);
+ luaZ_buffremove(ls->buff, 2); /* remove saved chars from buffer */
+ return r;
+}
+
+
+static unsigned long readutf8esc (LexState *ls) {
+ unsigned long r;
+ int i = 4; /* chars to be removed: '\', 'u', '{', and first digit */
+ save_and_next(ls); /* skip 'u' */
+ esccheck(ls, ls->current == '{', "missing '{'");
+ r = gethexa(ls); /* must have at least one digit */
+ while ((save_and_next(ls), lisxdigit(ls->current))) {
+ i++;
+ r = (r << 4) + luaO_hexavalue(ls->current);
+ esccheck(ls, r <= 0x10FFFF, "UTF-8 value too large");
+ }
+ esccheck(ls, ls->current == '}', "missing '}'");
+ next(ls); /* skip '}' */
+ luaZ_buffremove(ls->buff, i); /* remove saved chars from buffer */
+ return r;
+}
+
+
+static void utf8esc (LexState *ls) {
+ char buff[UTF8BUFFSZ];
+ int n = luaO_utf8esc(buff, readutf8esc(ls));
+ for (; n > 0; n--) /* add 'buff' to string */
+ save(ls, buff[UTF8BUFFSZ - n]);
+}
+
+
+static int readdecesc (LexState *ls) {
+ int i;
+ int r = 0; /* result accumulator */
+ for (i = 0; i < 3 && lisdigit(ls->current); i++) { /* read up to 3 digits */
+ r = 10*r + ls->current - '0';
+ save_and_next(ls);
+ }
+ esccheck(ls, r <= UCHAR_MAX, "decimal escape too large");
+ luaZ_buffremove(ls->buff, i); /* remove read digits from buffer */
+ return r;
+}
+
+
+static void read_string (LexState *ls, int del, SemInfo *seminfo) {
+ save_and_next(ls); /* keep delimiter (for error messages) */
+ while (ls->current != del) {
+ switch (ls->current) {
+ case EOZ:
+ lexerror(ls, "unfinished string", TK_EOS);
+ break; /* to avoid warnings */
+ case '\n':
+ case '\r':
+ lexerror(ls, "unfinished string", TK_STRING);
+ break; /* to avoid warnings */
+ case '\\': { /* escape sequences */
+ int c; /* final character to be saved */
+ save_and_next(ls); /* keep '\\' for error messages */
+ switch (ls->current) {
+ case 'a': c = '\a'; goto read_save;
+ case 'b': c = '\b'; goto read_save;
+ case 'f': c = '\f'; goto read_save;
+ case 'n': c = '\n'; goto read_save;
+ case 'r': c = '\r'; goto read_save;
+ case 't': c = '\t'; goto read_save;
+ case 'v': c = '\v'; goto read_save;
+ case 'x': c = readhexaesc(ls); goto read_save;
+ case 'u': utf8esc(ls); goto no_save;
+ case '\n': case '\r':
+ inclinenumber(ls); c = '\n'; goto only_save;
+ case '\\': case '\"': case '\'':
+ c = ls->current; goto read_save;
+ case EOZ: goto no_save; /* will raise an error next loop */
+ case 'z': { /* zap following span of spaces */
+ luaZ_buffremove(ls->buff, 1); /* remove '\\' */
+ next(ls); /* skip the 'z' */
+ while (lisspace(ls->current)) {
+ if (currIsNewline(ls)) inclinenumber(ls);
+ else next(ls);
+ }
+ goto no_save;
+ }
+ default: {
+ esccheck(ls, lisdigit(ls->current), "invalid escape sequence");
+ c = readdecesc(ls); /* digital escape '\ddd' */
+ goto only_save;
+ }
+ }
+ read_save:
+ next(ls);
+ /* go through */
+ only_save:
+ luaZ_buffremove(ls->buff, 1); /* remove '\\' */
+ save(ls, c);
+ /* go through */
+ no_save: break;
+ }
+ default:
+ save_and_next(ls);
+ }
+ }
+ save_and_next(ls); /* skip delimiter */
+ seminfo->ts = luaX_newstring(ls, luaZ_buffer(ls->buff) + 1,
+ luaZ_bufflen(ls->buff) - 2);
+}
+
+
+static int llex (LexState *ls, SemInfo *seminfo) {
+ luaZ_resetbuffer(ls->buff);
+ for (;;) {
+ switch (ls->current) {
+ case '\n': case '\r': { /* line breaks */
+ inclinenumber(ls);
+ break;
+ }
+ case ' ': case '\f': case '\t': case '\v': { /* spaces */
+ next(ls);
+ break;
+ }
+ case '-': { /* '-' or '--' (comment) */
+ next(ls);
+ if (ls->current != '-') return '-';
+ /* else is a comment */
+ next(ls);
+ if (ls->current == '[') { /* long comment? */
+ int sep = skip_sep(ls);
+ luaZ_resetbuffer(ls->buff); /* 'skip_sep' may dirty the buffer */
+ if (sep >= 0) {
+ read_long_string(ls, NULL, sep); /* skip long comment */
+ luaZ_resetbuffer(ls->buff); /* previous call may dirty the buff. */
+ break;
+ }
+ }
+ /* else short comment */
+ while (!currIsNewline(ls) && ls->current != EOZ)
+ next(ls); /* skip until end of line (or end of file) */
+ break;
+ }
+ case '[': { /* long string or simply '[' */
+ int sep = skip_sep(ls);
+ if (sep >= 0) {
+ read_long_string(ls, seminfo, sep);
+ return TK_STRING;
+ }
+ else if (sep != -1) /* '[=...' missing second bracket */
+ lexerror(ls, "invalid long string delimiter", TK_STRING);
+ return '[';
+ }
+ case '=': {
+ next(ls);
+ if (check_next1(ls, '=')) return TK_EQ;
+ else return '=';
+ }
+ case '<': {
+ next(ls);
+ if (check_next1(ls, '=')) return TK_LE;
+ else if (check_next1(ls, '<')) return TK_SHL;
+ else return '<';
+ }
+ case '>': {
+ next(ls);
+ if (check_next1(ls, '=')) return TK_GE;
+ else if (check_next1(ls, '>')) return TK_SHR;
+ else return '>';
+ }
+ case '/': {
+ next(ls);
+ if (check_next1(ls, '/')) return TK_IDIV;
+ else return '/';
+ }
+ case '~': {
+ next(ls);
+ if (check_next1(ls, '=')) return TK_NE;
+ else return '~';
+ }
+ case ':': {
+ next(ls);
+ if (check_next1(ls, ':')) return TK_DBCOLON;
+ else return ':';
+ }
+ case '"': case '\'': { /* short literal strings */
+ read_string(ls, ls->current, seminfo);
+ return TK_STRING;
+ }
+ case '.': { /* '.', '..', '...', or number */
+ save_and_next(ls);
+ if (check_next1(ls, '.')) {
+ if (check_next1(ls, '.'))
+ return TK_DOTS; /* '...' */
+ else return TK_CONCAT; /* '..' */
+ }
+ else if (!lisdigit(ls->current)) return '.';
+ else return read_numeral(ls, seminfo);
+ }
+ case '0': case '1': case '2': case '3': case '4':
+ case '5': case '6': case '7': case '8': case '9': {
+ return read_numeral(ls, seminfo);
+ }
+ case EOZ: {
+ return TK_EOS;
+ }
+ default: {
+ if (lislalpha(ls->current)) { /* identifier or reserved word? */
+ TString *ts;
+ do {
+ save_and_next(ls);
+ } while (lislalnum(ls->current));
+ ts = luaX_newstring(ls, luaZ_buffer(ls->buff),
+ luaZ_bufflen(ls->buff));
+ seminfo->ts = ts;
+ if (isreserved(ts)) /* reserved word? */
+ return ts->extra - 1 + FIRST_RESERVED;
+ else {
+ return TK_NAME;
+ }
+ }
+ else { /* single-char tokens (+ - / ...) */
+ int c = ls->current;
+ next(ls);
+ return c;
+ }
+ }
+ }
+ }
+}
+
+
+void luaX_next (LexState *ls) {
+ ls->lastline = ls->linenumber;
+ if (ls->lookahead.token != TK_EOS) { /* is there a look-ahead token? */
+ ls->t = ls->lookahead; /* use this one */
+ ls->lookahead.token = TK_EOS; /* and discharge it */
+ }
+ else
+ ls->t.token = llex(ls, &ls->t.seminfo); /* read next token */
+}
+
+
+int luaX_lookahead (LexState *ls) {
+ lua_assert(ls->lookahead.token == TK_EOS);
+ ls->lookahead.token = llex(ls, &ls->lookahead.seminfo);
+ return ls->lookahead.token;
+}
+
diff --git a/lua/src/llex.h b/lua/src/llex.h
new file mode 100644
index 000000000..2363d87e4
--- /dev/null
+++ b/lua/src/llex.h
@@ -0,0 +1,85 @@
+/*
+** $Id: llex.h,v 1.79 2016/05/02 14:02:12 roberto Exp $
+** Lexical Analyzer
+** See Copyright Notice in lua.h
+*/
+
+#ifndef llex_h
+#define llex_h
+
+#include "lobject.h"
+#include "lzio.h"
+
+
+#define FIRST_RESERVED 257
+
+
+#if !defined(LUA_ENV)
+#define LUA_ENV "_ENV"
+#endif
+
+
+/*
+* WARNING: if you change the order of this enumeration,
+* grep "ORDER RESERVED"
+*/
+enum RESERVED {
+ /* terminal symbols denoted by reserved words */
+ TK_AND = FIRST_RESERVED, TK_BREAK,
+ TK_DO, TK_ELSE, TK_ELSEIF, TK_END, TK_FALSE, TK_FOR, TK_FUNCTION,
+ TK_GOTO, TK_IF, TK_IN, TK_LOCAL, TK_NIL, TK_NOT, TK_OR, TK_REPEAT,
+ TK_RETURN, TK_THEN, TK_TRUE, TK_UNTIL, TK_WHILE,
+ /* other terminal symbols */
+ TK_IDIV, TK_CONCAT, TK_DOTS, TK_EQ, TK_GE, TK_LE, TK_NE,
+ TK_SHL, TK_SHR,
+ TK_DBCOLON, TK_EOS,
+ TK_FLT, TK_INT, TK_NAME, TK_STRING
+};
+
+/* number of reserved words */
+#define NUM_RESERVED (cast(int, TK_WHILE-FIRST_RESERVED+1))
+
+
+typedef union {
+ lua_Number r;
+ lua_Integer i;
+ TString *ts;
+} SemInfo; /* semantics information */
+
+
+typedef struct Token {
+ int token;
+ SemInfo seminfo;
+} Token;
+
+
+/* state of the lexer plus state of the parser when shared by all
+ functions */
+typedef struct LexState {
+ int current; /* current character (charint) */
+ int linenumber; /* input line counter */
+ int lastline; /* line of last token 'consumed' */
+ Token t; /* current token */
+ Token lookahead; /* look ahead token */
+ struct FuncState *fs; /* current function (parser) */
+ struct lua_State *L;
+ ZIO *z; /* input stream */
+ Mbuffer *buff; /* buffer for tokens */
+ Table *h; /* to avoid collection/reuse strings */
+ struct Dyndata *dyd; /* dynamic structures used by the parser */
+ TString *source; /* current source name */
+ TString *envn; /* environment variable name */
+} LexState;
+
+
+LUAI_FUNC void luaX_init (lua_State *L);
+LUAI_FUNC void luaX_setinput (lua_State *L, LexState *ls, ZIO *z,
+ TString *source, int firstchar);
+LUAI_FUNC TString *luaX_newstring (LexState *ls, const char *str, size_t l);
+LUAI_FUNC void luaX_next (LexState *ls);
+LUAI_FUNC int luaX_lookahead (LexState *ls);
+LUAI_FUNC l_noret luaX_syntaxerror (LexState *ls, const char *s);
+LUAI_FUNC const char *luaX_token2str (LexState *ls, int token);
+
+
+#endif
diff --git a/lua/src/llimits.h b/lua/src/llimits.h
new file mode 100644
index 000000000..f21377fef
--- /dev/null
+++ b/lua/src/llimits.h
@@ -0,0 +1,323 @@
+/*
+** $Id: llimits.h,v 1.141 2015/11/19 19:16:22 roberto Exp $
+** Limits, basic types, and some other 'installation-dependent' definitions
+** See Copyright Notice in lua.h
+*/
+
+#ifndef llimits_h
+#define llimits_h
+
+
+#include <limits.h>
+#include <stddef.h>
+
+
+#include "lua.h"
+
+/*
+** 'lu_mem' and 'l_mem' are unsigned/signed integers big enough to count
+** the total memory used by Lua (in bytes). Usually, 'size_t' and
+** 'ptrdiff_t' should work, but we use 'long' for 16-bit machines.
+*/
+#if defined(LUAI_MEM) /* { external definitions? */
+typedef LUAI_UMEM lu_mem;
+typedef LUAI_MEM l_mem;
+#elif LUAI_BITSINT >= 32 /* }{ */
+typedef size_t lu_mem;
+typedef ptrdiff_t l_mem;
+#else /* 16-bit ints */ /* }{ */
+typedef unsigned long lu_mem;
+typedef long l_mem;
+#endif /* } */
+
+
+/* chars used as small naturals (so that 'char' is reserved for characters) */
+typedef unsigned char lu_byte;
+
+
+/* maximum value for size_t */
+#define MAX_SIZET ((size_t)(~(size_t)0))
+
+/* maximum size visible for Lua (must be representable in a lua_Integer) */
+#define MAX_SIZE (sizeof(size_t) < sizeof(lua_Integer) ? MAX_SIZET \
+ : (size_t)(LUA_MAXINTEGER))
+
+
+#define MAX_LUMEM ((lu_mem)(~(lu_mem)0))
+
+#define MAX_LMEM ((l_mem)(MAX_LUMEM >> 1))
+
+
+#define MAX_INT INT_MAX /* maximum value of an int */
+
+
+/*
+** conversion of pointer to unsigned integer:
+** this is for hashing only; there is no problem if the integer
+** cannot hold the whole pointer value
+*/
+#define point2uint(p) ((unsigned int)((size_t)(p) & UINT_MAX))
+
+
+
+/* type to ensure maximum alignment */
+#if defined(LUAI_USER_ALIGNMENT_T)
+typedef LUAI_USER_ALIGNMENT_T L_Umaxalign;
+#else
+typedef union {
+ lua_Number n;
+ double u;
+ void *s;
+ lua_Integer i;
+ long l;
+} L_Umaxalign;
+#endif
+
+
+
+/* types of 'usual argument conversions' for lua_Number and lua_Integer */
+typedef LUAI_UACNUMBER l_uacNumber;
+typedef LUAI_UACINT l_uacInt;
+
+
+/* internal assertions for in-house debugging */
+#if defined(lua_assert)
+#define check_exp(c,e) (lua_assert(c), (e))
+/* to avoid problems with conditions too long */
+#define lua_longassert(c) ((c) ? (void)0 : lua_assert(0))
+#else
+#define lua_assert(c) ((void)0)
+#define check_exp(c,e) (e)
+#define lua_longassert(c) ((void)0)
+#endif
+
+/*
+** assertion for checking API calls
+*/
+#if !defined(luai_apicheck)
+#define luai_apicheck(l,e) lua_assert(e)
+#endif
+
+#define api_check(l,e,msg) luai_apicheck(l,(e) && msg)
+
+
+/* macro to avoid warnings about unused variables */
+#if !defined(UNUSED)
+#define UNUSED(x) ((void)(x))
+#endif
+
+
+/* type casts (a macro highlights casts in the code) */
+#define cast(t, exp) ((t)(exp))
+
+#define cast_void(i) cast(void, (i))
+#define cast_byte(i) cast(lu_byte, (i))
+#define cast_num(i) cast(lua_Number, (i))
+#define cast_int(i) cast(int, (i))
+#define cast_uchar(i) cast(unsigned char, (i))
+
+
+/* cast a signed lua_Integer to lua_Unsigned */
+#if !defined(l_castS2U)
+#define l_castS2U(i) ((lua_Unsigned)(i))
+#endif
+
+/*
+** cast a lua_Unsigned to a signed lua_Integer; this cast is
+** not strict ISO C, but two-complement architectures should
+** work fine.
+*/
+#if !defined(l_castU2S)
+#define l_castU2S(i) ((lua_Integer)(i))
+#endif
+
+
+/*
+** non-return type
+*/
+#if defined(__GNUC__)
+#define l_noret void __attribute__((noreturn))
+#elif defined(_MSC_VER) && _MSC_VER >= 1200
+#define l_noret void __declspec(noreturn)
+#else
+#define l_noret void
+#endif
+
+
+
+/*
+** maximum depth for nested C calls and syntactical nested non-terminals
+** in a program. (Value must fit in an unsigned short int.)
+*/
+#if !defined(LUAI_MAXCCALLS)
+#define LUAI_MAXCCALLS 200
+#endif
+
+
+
+/*
+** type for virtual-machine instructions;
+** must be an unsigned with (at least) 4 bytes (see details in lopcodes.h)
+*/
+#if LUAI_BITSINT >= 32
+typedef unsigned int Instruction;
+#else
+typedef unsigned long Instruction;
+#endif
+
+
+
+/*
+** Maximum length for short strings, that is, strings that are
+** internalized. (Cannot be smaller than reserved words or tags for
+** metamethods, as these strings must be internalized;
+** #("function") = 8, #("__newindex") = 10.)
+*/
+#if !defined(LUAI_MAXSHORTLEN)
+#define LUAI_MAXSHORTLEN 40
+#endif
+
+
+/*
+** Initial size for the string table (must be power of 2).
+** The Lua core alone registers ~50 strings (reserved words +
+** metaevent keys + a few others). Libraries would typically add
+** a few dozen more.
+*/
+#if !defined(MINSTRTABSIZE)
+#define MINSTRTABSIZE 128
+#endif
+
+
+/*
+** Size of cache for strings in the API. 'N' is the number of
+** sets (better be a prime) and 'M' is the size of each set (M == 1
+** makes a direct cache.)
+*/
+#if !defined(STRCACHE_N)
+#define STRCACHE_N 53
+#define STRCACHE_M 2
+#endif
+
+
+/* minimum size for string buffer */
+#if !defined(LUA_MINBUFFER)
+#define LUA_MINBUFFER 32
+#endif
+
+
+/*
+** macros that are executed whenever the program enters the Lua core
+** ('lua_lock') and leaves the core ('lua_unlock')
+*/
+#if !defined(lua_lock)
+#define lua_lock(L) ((void) 0)
+#define lua_unlock(L) ((void) 0)
+#endif
+
+/*
+** macro executed during Lua functions at points where the
+** function can yield.
+*/
+#if !defined(luai_threadyield)
+#define luai_threadyield(L) {lua_unlock(L); lua_lock(L);}
+#endif
+
+
+/*
+** these macros allow user-specific actions on threads when you define
+** LUAI_EXTRASPACE and need to do something extra when a thread is
+** created/deleted/resumed/yielded.
+*/
+#if !defined(luai_userstateopen)
+#define luai_userstateopen(L) ((void)L)
+#endif
+
+#if !defined(luai_userstateclose)
+#define luai_userstateclose(L) ((void)L)
+#endif
+
+#if !defined(luai_userstatethread)
+#define luai_userstatethread(L,L1) ((void)L)
+#endif
+
+#if !defined(luai_userstatefree)
+#define luai_userstatefree(L,L1) ((void)L)
+#endif
+
+#if !defined(luai_userstateresume)
+#define luai_userstateresume(L,n) ((void)L)
+#endif
+
+#if !defined(luai_userstateyield)
+#define luai_userstateyield(L,n) ((void)L)
+#endif
+
+
+
+/*
+** The luai_num* macros define the primitive operations over numbers.
+*/
+
+/* floor division (defined as 'floor(a/b)') */
+#if !defined(luai_numidiv)
+#define luai_numidiv(L,a,b) ((void)L, l_floor(luai_numdiv(L,a,b)))
+#endif
+
+/* float division */
+#if !defined(luai_numdiv)
+#define luai_numdiv(L,a,b) ((a)/(b))
+#endif
+
+/*
+** modulo: defined as 'a - floor(a/b)*b'; this definition gives NaN when
+** 'b' is huge, but the result should be 'a'. 'fmod' gives the result of
+** 'a - trunc(a/b)*b', and therefore must be corrected when 'trunc(a/b)
+** ~= floor(a/b)'. That happens when the division has a non-integer
+** negative result, which is equivalent to the test below.
+*/
+#if !defined(luai_nummod)
+#define luai_nummod(L,a,b,m) \
+ { (m) = l_mathop(fmod)(a,b); if ((m)*(b) < 0) (m) += (b); }
+#endif
+
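/*
** [editorial sketch -- not part of the Lua sources added by this patch]
** The floor-style modulo that 'luai_nummod' implements, written as a plain
** C function on doubles: fmod(-5.0, 3.0) is -2.0, and the sign correction
** turns it into 1.0, matching Lua's '%' on floats.
*/
#include <math.h>

static double floor_mod_sketch (double a, double b) {
  double m = fmod(a, b);
  if (m * b < 0.0) m += b;  /* operands of opposite sign: shift toward b */
  return m;
}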
+/* exponentiation */
+#if !defined(luai_numpow)
+#define luai_numpow(L,a,b) ((void)L, l_mathop(pow)(a,b))
+#endif
+
+/* the others are quite standard operations */
+#if !defined(luai_numadd)
+#define luai_numadd(L,a,b) ((a)+(b))
+#define luai_numsub(L,a,b) ((a)-(b))
+#define luai_nummul(L,a,b) ((a)*(b))
+#define luai_numunm(L,a) (-(a))
+#define luai_numeq(a,b) ((a)==(b))
+#define luai_numlt(a,b) ((a)<(b))
+#define luai_numle(a,b) ((a)<=(b))
+#define luai_numisnan(a) (!luai_numeq((a), (a)))
+#endif
+
+
+
+
+
+/*
+** macro to control inclusion of some hard tests on stack reallocation
+*/
+#if !defined(HARDSTACKTESTS)
+#define condmovestack(L,pre,pos) ((void)0)
+#else
+/* realloc stack keeping its size */
+#define condmovestack(L,pre,pos) \
+ { int sz_ = (L)->stacksize; pre; luaD_reallocstack((L), sz_); pos; }
+#endif
+
+#if !defined(HARDMEMTESTS)
+#define condchangemem(L,pre,pos) ((void)0)
+#else
+#define condchangemem(L,pre,pos) \
+ { if (G(L)->gcrunning) { pre; luaC_fullgc(L, 0); pos; } }
+#endif
+
+#endif
diff --git a/lua/src/lmathlib.c b/lua/src/lmathlib.c
new file mode 100644
index 000000000..b7f8baee0
--- /dev/null
+++ b/lua/src/lmathlib.c
@@ -0,0 +1,410 @@
+/*
+** $Id: lmathlib.c,v 1.119 2016/12/22 13:08:50 roberto Exp $
+** Standard mathematical library
+** See Copyright Notice in lua.h
+*/
+
+#define lmathlib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <stdlib.h>
+#include <math.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+#undef PI
+#define PI (l_mathop(3.141592653589793238462643383279502884))
+
+
+#if !defined(l_rand) /* { */
+#if defined(LUA_USE_POSIX)
+#define l_rand() random()
+#define l_srand(x) srandom(x)
+#define L_RANDMAX 2147483647 /* (2^31 - 1), following POSIX */
+#else
+#define l_rand() rand()
+#define l_srand(x) srand(x)
+#define L_RANDMAX RAND_MAX
+#endif
+#endif /* } */
+
+
+static int math_abs (lua_State *L) {
+ if (lua_isinteger(L, 1)) {
+ lua_Integer n = lua_tointeger(L, 1);
+ if (n < 0) n = (lua_Integer)(0u - (lua_Unsigned)n);
+ lua_pushinteger(L, n);
+ }
+ else
+ lua_pushnumber(L, l_mathop(fabs)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_sin (lua_State *L) {
+ lua_pushnumber(L, l_mathop(sin)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_cos (lua_State *L) {
+ lua_pushnumber(L, l_mathop(cos)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_tan (lua_State *L) {
+ lua_pushnumber(L, l_mathop(tan)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_asin (lua_State *L) {
+ lua_pushnumber(L, l_mathop(asin)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_acos (lua_State *L) {
+ lua_pushnumber(L, l_mathop(acos)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_atan (lua_State *L) {
+ lua_Number y = luaL_checknumber(L, 1);
+ lua_Number x = luaL_optnumber(L, 2, 1);
+ lua_pushnumber(L, l_mathop(atan2)(y, x));
+ return 1;
+}
+
+
+static int math_toint (lua_State *L) {
+ int valid;
+ lua_Integer n = lua_tointegerx(L, 1, &valid);
+ if (valid)
+ lua_pushinteger(L, n);
+ else {
+ luaL_checkany(L, 1);
+ lua_pushnil(L); /* value is not convertible to integer */
+ }
+ return 1;
+}
+
+
+static void pushnumint (lua_State *L, lua_Number d) {
+ lua_Integer n;
+ if (lua_numbertointeger(d, &n)) /* does 'd' fit in an integer? */
+ lua_pushinteger(L, n); /* result is integer */
+ else
+ lua_pushnumber(L, d); /* result is float */
+}
+
+
+static int math_floor (lua_State *L) {
+ if (lua_isinteger(L, 1))
+ lua_settop(L, 1); /* integer is its own floor */
+ else {
+ lua_Number d = l_mathop(floor)(luaL_checknumber(L, 1));
+ pushnumint(L, d);
+ }
+ return 1;
+}
+
+
+static int math_ceil (lua_State *L) {
+ if (lua_isinteger(L, 1))
+ lua_settop(L, 1); /* integer is its own ceil */
+ else {
+ lua_Number d = l_mathop(ceil)(luaL_checknumber(L, 1));
+ pushnumint(L, d);
+ }
+ return 1;
+}
+
+
+static int math_fmod (lua_State *L) {
+ if (lua_isinteger(L, 1) && lua_isinteger(L, 2)) {
+ lua_Integer d = lua_tointeger(L, 2);
+ if ((lua_Unsigned)d + 1u <= 1u) { /* special cases: -1 or 0 */
+ luaL_argcheck(L, d != 0, 2, "zero");
+ lua_pushinteger(L, 0); /* avoid overflow with 0x80000... / -1 */
+ }
+ else
+ lua_pushinteger(L, lua_tointeger(L, 1) % d);
+ }
+ else
+ lua_pushnumber(L, l_mathop(fmod)(luaL_checknumber(L, 1),
+ luaL_checknumber(L, 2)));
+ return 1;
+}
+
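/*
** [editorial sketch -- not part of the Lua sources added by this patch]
** The special-case test in 'math_fmod' above, on fixed-width stand-ins for
** lua_Integer/lua_Unsigned: for d == 0 or d == -1, (uint64_t)d + 1u wraps
** to 1u or 0u, so one unsigned comparison catches both, and the overflowing
** division LUA_MININTEGER % -1 is never evaluated.
*/
#include <stdint.h>

static int is_zero_or_minus_one_sketch (int64_t d) {
  return (uint64_t)d + 1u <= 1u;
}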
+
+/*
+** next function does not use 'modf', avoiding problems with 'double*'
+** (which is not compatible with 'float*') when lua_Number is not
+** 'double'.
+*/
+static int math_modf (lua_State *L) {
+ if (lua_isinteger(L ,1)) {
+ lua_settop(L, 1); /* number is its own integer part */
+ lua_pushnumber(L, 0); /* no fractional part */
+ }
+ else {
+ lua_Number n = luaL_checknumber(L, 1);
+ /* integer part (rounds toward zero) */
+ lua_Number ip = (n < 0) ? l_mathop(ceil)(n) : l_mathop(floor)(n);
+ pushnumint(L, ip);
+ /* fractional part (test needed for inf/-inf) */
+ lua_pushnumber(L, (n == ip) ? l_mathop(0.0) : (n - ip));
+ }
+ return 2;
+}
+
+
+static int math_sqrt (lua_State *L) {
+ lua_pushnumber(L, l_mathop(sqrt)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+
+static int math_ult (lua_State *L) {
+ lua_Integer a = luaL_checkinteger(L, 1);
+ lua_Integer b = luaL_checkinteger(L, 2);
+ lua_pushboolean(L, (lua_Unsigned)a < (lua_Unsigned)b);
+ return 1;
+}
+
+static int math_log (lua_State *L) {
+ lua_Number x = luaL_checknumber(L, 1);
+ lua_Number res;
+ if (lua_isnoneornil(L, 2))
+ res = l_mathop(log)(x);
+ else {
+ lua_Number base = luaL_checknumber(L, 2);
+#if !defined(LUA_USE_C89)
+ if (base == l_mathop(2.0))
+ res = l_mathop(log2)(x); else
+#endif
+ if (base == l_mathop(10.0))
+ res = l_mathop(log10)(x);
+ else
+ res = l_mathop(log)(x)/l_mathop(log)(base);
+ }
+ lua_pushnumber(L, res);
+ return 1;
+}
+
+static int math_exp (lua_State *L) {
+ lua_pushnumber(L, l_mathop(exp)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_deg (lua_State *L) {
+ lua_pushnumber(L, luaL_checknumber(L, 1) * (l_mathop(180.0) / PI));
+ return 1;
+}
+
+static int math_rad (lua_State *L) {
+ lua_pushnumber(L, luaL_checknumber(L, 1) * (PI / l_mathop(180.0)));
+ return 1;
+}
+
+
+static int math_min (lua_State *L) {
+ int n = lua_gettop(L); /* number of arguments */
+ int imin = 1; /* index of current minimum value */
+ int i;
+ luaL_argcheck(L, n >= 1, 1, "value expected");
+ for (i = 2; i <= n; i++) {
+ if (lua_compare(L, i, imin, LUA_OPLT))
+ imin = i;
+ }
+ lua_pushvalue(L, imin);
+ return 1;
+}
+
+
+static int math_max (lua_State *L) {
+ int n = lua_gettop(L); /* number of arguments */
+ int imax = 1; /* index of current maximum value */
+ int i;
+ luaL_argcheck(L, n >= 1, 1, "value expected");
+ for (i = 2; i <= n; i++) {
+ if (lua_compare(L, imax, i, LUA_OPLT))
+ imax = i;
+ }
+ lua_pushvalue(L, imax);
+ return 1;
+}
+
+/*
+** This function uses 'double' (instead of 'lua_Number') to ensure that
+** all bits from 'l_rand' can be represented, and that 'RANDMAX + 1.0'
+** will keep full precision (ensuring that 'r' is always less than 1.0.)
+*/
+static int math_random (lua_State *L) {
+ lua_Integer low, up;
+ double r = (double)l_rand() * (1.0 / ((double)L_RANDMAX + 1.0));
+ switch (lua_gettop(L)) { /* check number of arguments */
+ case 0: { /* no arguments */
+ lua_pushnumber(L, (lua_Number)r); /* Number between 0 and 1 */
+ return 1;
+ }
+ case 1: { /* only upper limit */
+ low = 1;
+ up = luaL_checkinteger(L, 1);
+ break;
+ }
+ case 2: { /* lower and upper limits */
+ low = luaL_checkinteger(L, 1);
+ up = luaL_checkinteger(L, 2);
+ break;
+ }
+ default: return luaL_error(L, "wrong number of arguments");
+ }
+ /* random integer in the interval [low, up] */
+ luaL_argcheck(L, low <= up, 1, "interval is empty");
+ luaL_argcheck(L, low >= 0 || up <= LUA_MAXINTEGER + low, 1,
+ "interval too large");
+ r *= (double)(up - low) + 1.0;
+ lua_pushinteger(L, (lua_Integer)r + low);
+ return 1;
+}
+
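/*
** [editorial sketch -- not part of the Lua sources added by this patch]
** The [0,1) -> [low,up] mapping used by 'math_random' above, on plain C
** types and the C library generator; the range checks are omitted here.
*/
#include <stdlib.h>

static long random_in_range_sketch (long low, long up) {
  double r = (double)rand() * (1.0 / ((double)RAND_MAX + 1.0));  /* [0,1) */
  r *= (double)(up - low) + 1.0;   /* [0, up-low+1) */
  return (long)r + low;            /* integer in [low, up] */
}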
+
+static int math_randomseed (lua_State *L) {
+ l_srand((unsigned int)(lua_Integer)luaL_checknumber(L, 1));
+ (void)l_rand(); /* discard first value to avoid undesirable correlations */
+ return 0;
+}
+
+
+static int math_type (lua_State *L) {
+ if (lua_type(L, 1) == LUA_TNUMBER) {
+ if (lua_isinteger(L, 1))
+ lua_pushliteral(L, "integer");
+ else
+ lua_pushliteral(L, "float");
+ }
+ else {
+ luaL_checkany(L, 1);
+ lua_pushnil(L);
+ }
+ return 1;
+}
+
+
+/*
+** {==================================================================
+** Deprecated functions (for compatibility only)
+** ===================================================================
+*/
+#if defined(LUA_COMPAT_MATHLIB)
+
+static int math_cosh (lua_State *L) {
+ lua_pushnumber(L, l_mathop(cosh)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_sinh (lua_State *L) {
+ lua_pushnumber(L, l_mathop(sinh)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_tanh (lua_State *L) {
+ lua_pushnumber(L, l_mathop(tanh)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+static int math_pow (lua_State *L) {
+ lua_Number x = luaL_checknumber(L, 1);
+ lua_Number y = luaL_checknumber(L, 2);
+ lua_pushnumber(L, l_mathop(pow)(x, y));
+ return 1;
+}
+
+static int math_frexp (lua_State *L) {
+ int e;
+ lua_pushnumber(L, l_mathop(frexp)(luaL_checknumber(L, 1), &e));
+ lua_pushinteger(L, e);
+ return 2;
+}
+
+static int math_ldexp (lua_State *L) {
+ lua_Number x = luaL_checknumber(L, 1);
+ int ep = (int)luaL_checkinteger(L, 2);
+ lua_pushnumber(L, l_mathop(ldexp)(x, ep));
+ return 1;
+}
+
+static int math_log10 (lua_State *L) {
+ lua_pushnumber(L, l_mathop(log10)(luaL_checknumber(L, 1)));
+ return 1;
+}
+
+#endif
+/* }================================================================== */
+
+
+
+static const luaL_Reg mathlib[] = {
+ {"abs", math_abs},
+ {"acos", math_acos},
+ {"asin", math_asin},
+ {"atan", math_atan},
+ {"ceil", math_ceil},
+ {"cos", math_cos},
+ {"deg", math_deg},
+ {"exp", math_exp},
+ {"tointeger", math_toint},
+ {"floor", math_floor},
+ {"fmod", math_fmod},
+ {"ult", math_ult},
+ {"log", math_log},
+ {"max", math_max},
+ {"min", math_min},
+ {"modf", math_modf},
+ {"rad", math_rad},
+ {"random", math_random},
+ {"randomseed", math_randomseed},
+ {"sin", math_sin},
+ {"sqrt", math_sqrt},
+ {"tan", math_tan},
+ {"type", math_type},
+#if defined(LUA_COMPAT_MATHLIB)
+ {"atan2", math_atan},
+ {"cosh", math_cosh},
+ {"sinh", math_sinh},
+ {"tanh", math_tanh},
+ {"pow", math_pow},
+ {"frexp", math_frexp},
+ {"ldexp", math_ldexp},
+ {"log10", math_log10},
+#endif
+ /* placeholders */
+ {"pi", NULL},
+ {"huge", NULL},
+ {"maxinteger", NULL},
+ {"mininteger", NULL},
+ {NULL, NULL}
+};
+
+
+/*
+** Open math library
+*/
+LUAMOD_API int luaopen_math (lua_State *L) {
+ luaL_newlib(L, mathlib);
+ lua_pushnumber(L, PI);
+ lua_setfield(L, -2, "pi");
+ lua_pushnumber(L, (lua_Number)HUGE_VAL);
+ lua_setfield(L, -2, "huge");
+ lua_pushinteger(L, LUA_MAXINTEGER);
+ lua_setfield(L, -2, "maxinteger");
+ lua_pushinteger(L, LUA_MININTEGER);
+ lua_setfield(L, -2, "mininteger");
+ return 1;
+}
+
diff --git a/lua/src/lmem.c b/lua/src/lmem.c
new file mode 100644
index 000000000..0a0476cc7
--- /dev/null
+++ b/lua/src/lmem.c
@@ -0,0 +1,100 @@
+/*
+** $Id: lmem.c,v 1.91 2015/03/06 19:45:54 roberto Exp $
+** Interface to Memory Manager
+** See Copyright Notice in lua.h
+*/
+
+#define lmem_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <stddef.h>
+
+#include "lua.h"
+
+#include "ldebug.h"
+#include "ldo.h"
+#include "lgc.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lstate.h"
+
+
+
+/*
+** About the realloc function:
+** void * frealloc (void *ud, void *ptr, size_t osize, size_t nsize);
+** ('osize' is the old size, 'nsize' is the new size)
+**
+** * frealloc(ud, NULL, x, s) creates a new block of size 's' (no
+** matter 'x').
+**
+** * frealloc(ud, p, x, 0) frees the block 'p'
+** (in this specific case, frealloc must return NULL);
+** particularly, frealloc(ud, NULL, 0, 0) does nothing
+** (which is equivalent to free(NULL) in ISO C)
+**
+** frealloc returns NULL if it cannot create or reallocate the area
+** (any reallocation to an equal or smaller size cannot fail!)
+*/
+
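/*
** [editorial sketch -- not part of the Lua sources added by this patch]
** A minimal allocator that obeys the frealloc contract described above
** (it mirrors the default allocator in lauxlib.c) and could be passed to
** lua_newstate.
*/
#include <stdlib.h>
#include "lua.h"

static void *l_alloc_sketch (void *ud, void *ptr, size_t osize, size_t nsize) {
  (void)ud; (void)osize;  /* not used by this allocator */
  if (nsize == 0) {       /* frealloc(ud, p, x, 0) must free and return NULL */
    free(ptr);
    return NULL;
  }
  return realloc(ptr, nsize);  /* create or resize; NULL signals failure */
}

/* usage: lua_State *L = lua_newstate(l_alloc_sketch, NULL); */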
+
+
+#define MINSIZEARRAY 4
+
+
+void *luaM_growaux_ (lua_State *L, void *block, int *size, size_t size_elems,
+ int limit, const char *what) {
+ void *newblock;
+ int newsize;
+ if (*size >= limit/2) { /* cannot double it? */
+ if (*size >= limit) /* cannot grow even a little? */
+ luaG_runerror(L, "too many %s (limit is %d)", what, limit);
+ newsize = limit; /* still have at least one free place */
+ }
+ else {
+ newsize = (*size)*2;
+ if (newsize < MINSIZEARRAY)
+ newsize = MINSIZEARRAY; /* minimum size */
+ }
+ newblock = luaM_reallocv(L, block, *size, newsize, size_elems);
+ *size = newsize; /* update only when everything else is OK */
+ return newblock;
+}
+
+
+l_noret luaM_toobig (lua_State *L) {
+ luaG_runerror(L, "memory allocation error: block too big");
+}
+
+
+
+/*
+** generic allocation routine.
+*/
+void *luaM_realloc_ (lua_State *L, void *block, size_t osize, size_t nsize) {
+ void *newblock;
+ global_State *g = G(L);
+ size_t realosize = (block) ? osize : 0;
+ lua_assert((realosize == 0) == (block == NULL));
+#if defined(HARDMEMTESTS)
+ if (nsize > realosize && g->gcrunning)
+ luaC_fullgc(L, 1); /* force a GC whenever possible */
+#endif
+ newblock = (*g->frealloc)(g->ud, block, osize, nsize);
+ if (newblock == NULL && nsize > 0) {
+ lua_assert(nsize > realosize); /* cannot fail when shrinking a block */
+ if (g->version) { /* is state fully built? */
+ luaC_fullgc(L, 1); /* try to free some memory... */
+ newblock = (*g->frealloc)(g->ud, block, osize, nsize); /* try again */
+ }
+ if (newblock == NULL)
+ luaD_throw(L, LUA_ERRMEM);
+ }
+ lua_assert((nsize == 0) == (newblock == NULL));
+ g->GCdebt = (g->GCdebt + nsize) - realosize;
+ return newblock;
+}
+
diff --git a/lua/src/lmem.h b/lua/src/lmem.h
new file mode 100644
index 000000000..30f484895
--- /dev/null
+++ b/lua/src/lmem.h
@@ -0,0 +1,69 @@
+/*
+** $Id: lmem.h,v 1.43 2014/12/19 17:26:14 roberto Exp $
+** Interface to Memory Manager
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lmem_h
+#define lmem_h
+
+
+#include <stddef.h>
+
+#include "llimits.h"
+#include "lua.h"
+
+
+/*
+** This macro reallocs a vector 'b' from 'on' to 'n' elements, where
+** each element has size 'e'. In case of arithmetic overflow of the
+** product 'n'*'e', it raises an error (calling 'luaM_toobig'). Because
+** 'e' is always constant, it avoids the runtime division MAX_SIZET/(e).
+**
+** (The macro is somewhat complex to avoid warnings: The 'sizeof'
+** comparison avoids a runtime comparison when overflow cannot occur.
+** The compiler should be able to optimize the real test by itself, but
+** when it does it, it may give a warning about "comparison is always
+** false due to limited range of data type"; the +1 tricks the compiler,
+** avoiding this warning but also this optimization.)
+*/
+#define luaM_reallocv(L,b,on,n,e) \
+ (((sizeof(n) >= sizeof(size_t) && cast(size_t, (n)) + 1 > MAX_SIZET/(e)) \
+ ? luaM_toobig(L) : cast_void(0)) , \
+ luaM_realloc_(L, (b), (on)*(e), (n)*(e)))
+
+/*
+** Arrays of chars do not need any test
+*/
+#define luaM_reallocvchar(L,b,on,n) \
+ cast(char *, luaM_realloc_(L, (b), (on)*sizeof(char), (n)*sizeof(char)))
+
+#define luaM_freemem(L, b, s) luaM_realloc_(L, (b), (s), 0)
+#define luaM_free(L, b) luaM_realloc_(L, (b), sizeof(*(b)), 0)
+#define luaM_freearray(L, b, n) luaM_realloc_(L, (b), (n)*sizeof(*(b)), 0)
+
+#define luaM_malloc(L,s) luaM_realloc_(L, NULL, 0, (s))
+#define luaM_new(L,t) cast(t *, luaM_malloc(L, sizeof(t)))
+#define luaM_newvector(L,n,t) \
+ cast(t *, luaM_reallocv(L, NULL, 0, n, sizeof(t)))
+
+#define luaM_newobject(L,tag,s) luaM_realloc_(L, NULL, tag, (s))
+
+#define luaM_growvector(L,v,nelems,size,t,limit,e) \
+ if ((nelems)+1 > (size)) \
+ ((v)=cast(t *, luaM_growaux_(L,v,&(size),sizeof(t),limit,e)))
+
+#define luaM_reallocvector(L, v,oldn,n,t) \
+ ((v)=cast(t *, luaM_reallocv(L, v, oldn, n, sizeof(t))))
+
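/*
** [editorial sketch -- not part of the Lua sources added by this patch]
** Typical use of luaM_growvector inside the core, modeled on how the code
** generator grows a Proto's opcode array; 'sketch_emit' is illustrative.
*/
#include "llimits.h"
#include "lmem.h"
#include "lobject.h"

static void sketch_emit (lua_State *L, Proto *f, int pc, Instruction i) {
  /* make sure slot 'pc' exists, doubling f->code as needed */
  luaM_growvector(L, f->code, pc, f->sizecode, Instruction, MAX_INT, "opcodes");
  f->code[pc] = i;
}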
+LUAI_FUNC l_noret luaM_toobig (lua_State *L);
+
+/* not to be called directly */
+LUAI_FUNC void *luaM_realloc_ (lua_State *L, void *block, size_t oldsize,
+ size_t size);
+LUAI_FUNC void *luaM_growaux_ (lua_State *L, void *block, int *size,
+ size_t size_elem, int limit,
+ const char *what);
+
+#endif
+
diff --git a/lua/src/loadlib.c b/lua/src/loadlib.c
new file mode 100644
index 000000000..4791e748b
--- /dev/null
+++ b/lua/src/loadlib.c
@@ -0,0 +1,790 @@
+/*
+** $Id: loadlib.c,v 1.130 2017/01/12 17:14:26 roberto Exp $
+** Dynamic library loader for Lua
+** See Copyright Notice in lua.h
+**
+** This module contains an implementation of loadlib for Unix systems
+** that have dlfcn, an implementation for Windows, and a stub for other
+** systems.
+*/
+
+#define loadlib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+/*
+** LUA_IGMARK is a mark in a module name: when building the luaopen_
+** function name, the mark and everything after it are ignored (see
+** 'loadfunc'; for compatibility, a fallback instead ignores everything
+** up to and including the mark).
+*/
+#if !defined (LUA_IGMARK)
+#define LUA_IGMARK "-"
+#endif
+
+
+/*
+** LUA_CSUBSEP is the character that replaces dots in submodule names
+** when searching for a C loader.
+** LUA_LSUBSEP is the character that replaces dots in submodule names
+** when searching for a Lua loader.
+*/
+#if !defined(LUA_CSUBSEP)
+#define LUA_CSUBSEP LUA_DIRSEP
+#endif
+
+#if !defined(LUA_LSUBSEP)
+#define LUA_LSUBSEP LUA_DIRSEP
+#endif
+
+
+/* prefix for open functions in C libraries */
+#define LUA_POF "luaopen_"
+
+/* separator for open functions in C libraries */
+#define LUA_OFSEP "_"
+
+
+/*
+** unique key for table in the registry that keeps handles
+** for all loaded C libraries
+*/
+static const int CLIBS = 0;
+
+#define LIB_FAIL "open"
+
+
+#define setprogdir(L) ((void)0)
+
+
+/*
+** system-dependent functions
+*/
+
+/*
+** unload library 'lib'
+*/
+static void lsys_unloadlib (void *lib);
+
+/*
+** load C library in file 'path'. If 'seeglb', load with all names in
+** the library global.
+** Returns the library; in case of error, returns NULL plus an
+** error string in the stack.
+*/
+static void *lsys_load (lua_State *L, const char *path, int seeglb);
+
+/*
+** Try to find a function named 'sym' in library 'lib'.
+** Returns the function; in case of error, returns NULL plus an
+** error string in the stack.
+*/
+static lua_CFunction lsys_sym (lua_State *L, void *lib, const char *sym);
+
+
+
+
+#if defined(LUA_USE_DLOPEN) /* { */
+/*
+** {========================================================================
+** This is an implementation of loadlib based on the dlfcn interface.
+** The dlfcn interface is available in Linux, SunOS, Solaris, IRIX, FreeBSD,
+** NetBSD, AIX 4.2, HPUX 11, and probably most other Unix flavors, at least
+** as an emulation layer on top of native functions.
+** =========================================================================
+*/
+
+#include <dlfcn.h>
+
+/*
+** Macro to convert pointer-to-void* to pointer-to-function. This cast
+** is undefined according to ISO C, but POSIX assumes that it works.
+** (The '__extension__' in gnu compilers is only to avoid warnings.)
+*/
+#if defined(__GNUC__)
+#define cast_func(p) (__extension__ (lua_CFunction)(p))
+#else
+#define cast_func(p) ((lua_CFunction)(p))
+#endif
+
+
+static void lsys_unloadlib (void *lib) {
+ dlclose(lib);
+}
+
+
+static void *lsys_load (lua_State *L, const char *path, int seeglb) {
+ void *lib = dlopen(path, RTLD_NOW | (seeglb ? RTLD_GLOBAL : RTLD_LOCAL));
+ if (lib == NULL) lua_pushstring(L, dlerror());
+ return lib;
+}
+
+
+static lua_CFunction lsys_sym (lua_State *L, void *lib, const char *sym) {
+ lua_CFunction f = cast_func(dlsym(lib, sym));
+ if (f == NULL) lua_pushstring(L, dlerror());
+ return f;
+}
+
+/* }====================================================== */
+
+
+
+#elif defined(LUA_DL_DLL) /* }{ */
+/*
+** {======================================================================
+** This is an implementation of loadlib for Windows using native functions.
+** =======================================================================
+*/
+
+#include <windows.h>
+
+
+/*
+** optional flags for LoadLibraryEx
+*/
+#if !defined(LUA_LLE_FLAGS)
+#define LUA_LLE_FLAGS 0
+#endif
+
+
+#undef setprogdir
+
+
+/*
+** Replace in the path (on the top of the stack) any occurrence
+** of LUA_EXEC_DIR with the executable's path.
+*/
+static void setprogdir (lua_State *L) {
+ char buff[MAX_PATH + 1];
+ char *lb;
+ DWORD nsize = sizeof(buff)/sizeof(char);
+ DWORD n = GetModuleFileNameA(NULL, buff, nsize); /* get exec. name */
+ if (n == 0 || n == nsize || (lb = strrchr(buff, '\\')) == NULL)
+ luaL_error(L, "unable to get ModuleFileName");
+ else {
+ *lb = '\0'; /* cut name on the last '\\' to get the path */
+ luaL_gsub(L, lua_tostring(L, -1), LUA_EXEC_DIR, buff);
+ lua_remove(L, -2); /* remove original string */
+ }
+}
+
+
+
+
+static void pusherror (lua_State *L) {
+ int error = GetLastError();
+ char buffer[128];
+ if (FormatMessageA(FORMAT_MESSAGE_IGNORE_INSERTS | FORMAT_MESSAGE_FROM_SYSTEM,
+ NULL, error, 0, buffer, sizeof(buffer)/sizeof(char), NULL))
+ lua_pushstring(L, buffer);
+ else
+ lua_pushfstring(L, "system error %d\n", error);
+}
+
+static void lsys_unloadlib (void *lib) {
+ FreeLibrary((HMODULE)lib);
+}
+
+
+static void *lsys_load (lua_State *L, const char *path, int seeglb) {
+ HMODULE lib = LoadLibraryExA(path, NULL, LUA_LLE_FLAGS);
+ (void)(seeglb); /* not used: symbols are 'global' by default */
+ if (lib == NULL) pusherror(L);
+ return lib;
+}
+
+
+static lua_CFunction lsys_sym (lua_State *L, void *lib, const char *sym) {
+ lua_CFunction f = (lua_CFunction)GetProcAddress((HMODULE)lib, sym);
+ if (f == NULL) pusherror(L);
+ return f;
+}
+
+/* }====================================================== */
+
+
+#else /* }{ */
+/*
+** {======================================================
+** Fallback for other systems
+** =======================================================
+*/
+
+#undef LIB_FAIL
+#define LIB_FAIL "absent"
+
+
+#define DLMSG "dynamic libraries not enabled; check your Lua installation"
+
+
+static void lsys_unloadlib (void *lib) {
+ (void)(lib); /* not used */
+}
+
+
+static void *lsys_load (lua_State *L, const char *path, int seeglb) {
+ (void)(path); (void)(seeglb); /* not used */
+ lua_pushliteral(L, DLMSG);
+ return NULL;
+}
+
+
+static lua_CFunction lsys_sym (lua_State *L, void *lib, const char *sym) {
+ (void)(lib); (void)(sym); /* not used */
+ lua_pushliteral(L, DLMSG);
+ return NULL;
+}
+
+/* }====================================================== */
+#endif /* } */
+
+
+/*
+** {==================================================================
+** Set Paths
+** ===================================================================
+*/
+
+/*
+** LUA_PATH_VAR and LUA_CPATH_VAR are the names of the environment
+** variables that Lua checks to set its paths.
+*/
+#if !defined(LUA_PATH_VAR)
+#define LUA_PATH_VAR "LUA_PATH"
+#endif
+
+#if !defined(LUA_CPATH_VAR)
+#define LUA_CPATH_VAR "LUA_CPATH"
+#endif
+
+
+#define AUXMARK "\1" /* auxiliary mark */
+
+
+/*
+** return registry.LUA_NOENV as a boolean
+*/
+static int noenv (lua_State *L) {
+ int b;
+ lua_getfield(L, LUA_REGISTRYINDEX, "LUA_NOENV");
+ b = lua_toboolean(L, -1);
+ lua_pop(L, 1); /* remove value */
+ return b;
+}
+
+
+/*
+** Set a path
+*/
+static void setpath (lua_State *L, const char *fieldname,
+ const char *envname,
+ const char *dft) {
+ const char *nver = lua_pushfstring(L, "%s%s", envname, LUA_VERSUFFIX);
+ const char *path = getenv(nver); /* use versioned name */
+ if (path == NULL) /* no environment variable? */
+ path = getenv(envname); /* try unversioned name */
+ if (path == NULL || noenv(L)) /* no environment variable? */
+ lua_pushstring(L, dft); /* use default */
+ else {
+ /* replace ";;" by ";AUXMARK;" and then AUXMARK by default path */
+ path = luaL_gsub(L, path, LUA_PATH_SEP LUA_PATH_SEP,
+ LUA_PATH_SEP AUXMARK LUA_PATH_SEP);
+ luaL_gsub(L, path, AUXMARK, dft);
+ lua_remove(L, -2); /* remove result from 1st 'gsub' */
+ }
+ setprogdir(L);
+ lua_setfield(L, -3, fieldname); /* package[fieldname] = path value */
+ lua_pop(L, 1); /* pop versioned variable name */
+}
+
+/* }================================================================== */
+
+
+/*
+** return registry.CLIBS[path]
+*/
+static void *checkclib (lua_State *L, const char *path) {
+ void *plib;
+ lua_rawgetp(L, LUA_REGISTRYINDEX, &CLIBS);
+ lua_getfield(L, -1, path);
+ plib = lua_touserdata(L, -1); /* plib = CLIBS[path] */
+ lua_pop(L, 2); /* pop CLIBS table and 'plib' */
+ return plib;
+}
+
+
+/*
+** registry.CLIBS[path] = plib -- for queries
+** registry.CLIBS[#CLIBS + 1] = plib -- also keep a list of all libraries
+*/
+static void addtoclib (lua_State *L, const char *path, void *plib) {
+ lua_rawgetp(L, LUA_REGISTRYINDEX, &CLIBS);
+ lua_pushlightuserdata(L, plib);
+ lua_pushvalue(L, -1);
+ lua_setfield(L, -3, path); /* CLIBS[path] = plib */
+ lua_rawseti(L, -2, luaL_len(L, -2) + 1); /* CLIBS[#CLIBS + 1] = plib */
+ lua_pop(L, 1); /* pop CLIBS table */
+}
+
+
+/*
+** __gc tag method for CLIBS table: calls 'lsys_unloadlib' for all lib
+** handles in list CLIBS
+*/
+static int gctm (lua_State *L) {
+ lua_Integer n = luaL_len(L, 1);
+ for (; n >= 1; n--) { /* for each handle, in reverse order */
+ lua_rawgeti(L, 1, n); /* get handle CLIBS[n] */
+ lsys_unloadlib(lua_touserdata(L, -1));
+ lua_pop(L, 1); /* pop handle */
+ }
+ return 0;
+}
+
+
+
+/* error codes for 'lookforfunc' */
+#define ERRLIB 1
+#define ERRFUNC 2
+
+/*
+** Look for a C function named 'sym' in a dynamically loaded library
+** 'path'.
+** First, check whether the library is already loaded; if not, try
+** to load it.
+** Then, if 'sym' is '*', return true (as library has been loaded).
+** Otherwise, look for symbol 'sym' in the library and push a
+** C function with that symbol.
+** Return 0 and 'true' or a function in the stack; in case of
+** errors, return an error code and an error message in the stack.
+*/
+static int lookforfunc (lua_State *L, const char *path, const char *sym) {
+ void *reg = checkclib(L, path); /* check loaded C libraries */
+ if (reg == NULL) { /* must load library? */
+ reg = lsys_load(L, path, *sym == '*'); /* global symbols if 'sym'=='*' */
+ if (reg == NULL) return ERRLIB; /* unable to load library */
+ addtoclib(L, path, reg);
+ }
+ if (*sym == '*') { /* loading only library (no function)? */
+ lua_pushboolean(L, 1); /* return 'true' */
+ return 0; /* no errors */
+ }
+ else {
+ lua_CFunction f = lsys_sym(L, reg, sym);
+ if (f == NULL)
+ return ERRFUNC; /* unable to find function */
+ lua_pushcfunction(L, f); /* else create new function */
+ return 0; /* no errors */
+ }
+}
+
+
+static int ll_loadlib (lua_State *L) {
+ const char *path = luaL_checkstring(L, 1);
+ const char *init = luaL_checkstring(L, 2);
+ int stat = lookforfunc(L, path, init);
+ if (stat == 0) /* no errors? */
+ return 1; /* return the loaded function */
+ else { /* error; error message is on stack top */
+ lua_pushnil(L);
+ lua_insert(L, -2);
+ lua_pushstring(L, (stat == ERRLIB) ? LIB_FAIL : "init");
+ return 3; /* return nil, error message, and where */
+ }
+}
+
+
+
+/*
+** {======================================================
+** 'require' function
+** =======================================================
+*/
+
+
+static int readable (const char *filename) {
+ FILE *f = fopen(filename, "r"); /* try to open file */
+ if (f == NULL) return 0; /* open failed */
+ fclose(f);
+ return 1;
+}
+
+
+static const char *pushnexttemplate (lua_State *L, const char *path) {
+ const char *l;
+ while (*path == *LUA_PATH_SEP) path++; /* skip separators */
+ if (*path == '\0') return NULL; /* no more templates */
+ l = strchr(path, *LUA_PATH_SEP); /* find next separator */
+ if (l == NULL) l = path + strlen(path);
+ lua_pushlstring(L, path, l - path); /* template */
+ return l;
+}
+
+
+static const char *searchpath (lua_State *L, const char *name,
+ const char *path,
+ const char *sep,
+ const char *dirsep) {
+ luaL_Buffer msg; /* to build error message */
+ luaL_buffinit(L, &msg);
+ if (*sep != '\0') /* non-empty separator? */
+ name = luaL_gsub(L, name, sep, dirsep); /* replace it by 'dirsep' */
+ while ((path = pushnexttemplate(L, path)) != NULL) {
+ const char *filename = luaL_gsub(L, lua_tostring(L, -1),
+ LUA_PATH_MARK, name);
+ lua_remove(L, -2); /* remove path template */
+ if (readable(filename)) /* does file exist and is readable? */
+ return filename; /* return that file name */
+ lua_pushfstring(L, "\n\tno file '%s'", filename);
+ lua_remove(L, -2); /* remove file name */
+ luaL_addvalue(&msg); /* concatenate error msg. entry */
+ }
+ luaL_pushresult(&msg); /* create error message */
+ return NULL; /* not found */
+}
+
+
+static int ll_searchpath (lua_State *L) {
+ const char *f = searchpath(L, luaL_checkstring(L, 1),
+ luaL_checkstring(L, 2),
+ luaL_optstring(L, 3, "."),
+ luaL_optstring(L, 4, LUA_DIRSEP));
+ if (f != NULL) return 1;
+ else { /* error message is on top of the stack */
+ lua_pushnil(L);
+ lua_insert(L, -2);
+ return 2; /* return nil + error message */
+ }
+}
+
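/*
** [editorial sketch -- not part of the Lua sources added by this patch]
** package.searchpath, as exposed by 'll_searchpath' above, driven from
** host code; the path template here is hypothetical.
*/
#include <stdio.h>
#include "lua.h"
#include "lualib.h"
#include "lauxlib.h"

static void show_searchpath (lua_State *L) {
  if (luaL_dostring(L,
        "local file, err = package.searchpath('foo.bar',\n"
        "                                     './?.lua;./?/init.lua')\n"
        "print(file or err)  -- './foo/bar.lua' if readable, else the misses\n"))
    fprintf(stderr, "%s\n", lua_tostring(L, -1));
}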
+
+static const char *findfile (lua_State *L, const char *name,
+ const char *pname,
+ const char *dirsep) {
+ const char *path;
+ lua_getfield(L, lua_upvalueindex(1), pname);
+ path = lua_tostring(L, -1);
+ if (path == NULL)
+ luaL_error(L, "'package.%s' must be a string", pname);
+ return searchpath(L, name, path, ".", dirsep);
+}
+
+
+static int checkload (lua_State *L, int stat, const char *filename) {
+ if (stat) { /* module loaded successfully? */
+ lua_pushstring(L, filename); /* will be 2nd argument to module */
+ return 2; /* return open function and file name */
+ }
+ else
+ return luaL_error(L, "error loading module '%s' from file '%s':\n\t%s",
+ lua_tostring(L, 1), filename, lua_tostring(L, -1));
+}
+
+
+static int searcher_Lua (lua_State *L) {
+ const char *filename;
+ const char *name = luaL_checkstring(L, 1);
+ filename = findfile(L, name, "path", LUA_LSUBSEP);
+ if (filename == NULL) return 1; /* module not found in this path */
+ return checkload(L, (luaL_loadfile(L, filename) == LUA_OK), filename);
+}
+
+
+/*
+** Try to find a load function for module 'modname' at file 'filename'.
+** First, change '.' to '_' in 'modname'; then, if 'modname' has
+** the form X-Y (that is, it has an "ignore mark"), build a function
+** name "luaopen_X" and look for it. (For compatibility, if that
+** fails, it also tries "luaopen_Y".) If there is no ignore mark,
+** look for a function named "luaopen_modname".
+*/
+static int loadfunc (lua_State *L, const char *filename, const char *modname) {
+ const char *openfunc;
+ const char *mark;
+ modname = luaL_gsub(L, modname, ".", LUA_OFSEP);
+ mark = strchr(modname, *LUA_IGMARK);
+ if (mark) {
+ int stat;
+ openfunc = lua_pushlstring(L, modname, mark - modname);
+ openfunc = lua_pushfstring(L, LUA_POF"%s", openfunc);
+ stat = lookforfunc(L, filename, openfunc);
+ if (stat != ERRFUNC) return stat;
+ modname = mark + 1; /* else go ahead and try old-style name */
+ }
+ openfunc = lua_pushfstring(L, LUA_POF"%s", modname);
+ return lookforfunc(L, filename, openfunc);
+}
+
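/*
** [editorial sketch -- not part of the Lua sources added by this patch]
** The open-function naming derived by 'loadfunc' above: require("socket.core")
** makes the C searcher look for luaopen_socket_core in the matching library,
** and require("socket.core-v2") resolves to luaopen_socket_core as well,
** because the ignore mark '-' and the suffix after it are dropped (with
** luaopen_v2 tried only as the compatibility fallback). A C module for that
** (hypothetical) name would therefore export:
*/
#include "lua.h"

int luaopen_socket_core (lua_State *L) {
  lua_newtable(L);  /* module table returned to 'require' */
  return 1;
}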
+
+static int searcher_C (lua_State *L) {
+ const char *name = luaL_checkstring(L, 1);
+ const char *filename = findfile(L, name, "cpath", LUA_CSUBSEP);
+ if (filename == NULL) return 1; /* module not found in this path */
+ return checkload(L, (loadfunc(L, filename, name) == 0), filename);
+}
+
+
+static int searcher_Croot (lua_State *L) {
+ const char *filename;
+ const char *name = luaL_checkstring(L, 1);
+ const char *p = strchr(name, '.');
+ int stat;
+ if (p == NULL) return 0; /* is root */
+ lua_pushlstring(L, name, p - name);
+ filename = findfile(L, lua_tostring(L, -1), "cpath", LUA_CSUBSEP);
+ if (filename == NULL) return 1; /* root not found */
+ if ((stat = loadfunc(L, filename, name)) != 0) {
+ if (stat != ERRFUNC)
+ return checkload(L, 0, filename); /* real error */
+ else { /* open function not found */
+ lua_pushfstring(L, "\n\tno module '%s' in file '%s'", name, filename);
+ return 1;
+ }
+ }
+ lua_pushstring(L, filename); /* will be 2nd argument to module */
+ return 2;
+}
+
+
+static int searcher_preload (lua_State *L) {
+ const char *name = luaL_checkstring(L, 1);
+ lua_getfield(L, LUA_REGISTRYINDEX, LUA_PRELOAD_TABLE);
+ if (lua_getfield(L, -1, name) == LUA_TNIL) /* not found? */
+ lua_pushfstring(L, "\n\tno field package.preload['%s']", name);
+ return 1;
+}
+
+
+static void findloader (lua_State *L, const char *name) {
+ int i;
+ luaL_Buffer msg; /* to build error message */
+ luaL_buffinit(L, &msg);
+ /* push 'package.searchers' to index 3 in the stack */
+ if (lua_getfield(L, lua_upvalueindex(1), "searchers") != LUA_TTABLE)
+ luaL_error(L, "'package.searchers' must be a table");
+ /* iterate over available searchers to find a loader */
+ for (i = 1; ; i++) {
+ if (lua_rawgeti(L, 3, i) == LUA_TNIL) { /* no more searchers? */
+ lua_pop(L, 1); /* remove nil */
+ luaL_pushresult(&msg); /* create error message */
+ luaL_error(L, "module '%s' not found:%s", name, lua_tostring(L, -1));
+ }
+ lua_pushstring(L, name);
+ lua_call(L, 1, 2); /* call it */
+ if (lua_isfunction(L, -2)) /* did it find a loader? */
+ return; /* module loader found */
+ else if (lua_isstring(L, -2)) { /* searcher returned error message? */
+ lua_pop(L, 1); /* remove extra return */
+ luaL_addvalue(&msg); /* concatenate error message */
+ }
+ else
+ lua_pop(L, 2); /* remove both returns */
+ }
+}
+
+
+static int ll_require (lua_State *L) {
+ const char *name = luaL_checkstring(L, 1);
+ lua_settop(L, 1); /* LOADED table will be at index 2 */
+ lua_getfield(L, LUA_REGISTRYINDEX, LUA_LOADED_TABLE);
+ lua_getfield(L, 2, name); /* LOADED[name] */
+ if (lua_toboolean(L, -1)) /* is it there? */
+ return 1; /* package is already loaded */
+ /* else must load package */
+ lua_pop(L, 1); /* remove 'getfield' result */
+ findloader(L, name);
+ lua_pushstring(L, name); /* pass name as argument to module loader */
+ lua_insert(L, -2); /* name is 1st argument (before search data) */
+ lua_call(L, 2, 1); /* run loader to load module */
+ if (!lua_isnil(L, -1)) /* non-nil return? */
+ lua_setfield(L, 2, name); /* LOADED[name] = returned value */
+ if (lua_getfield(L, 2, name) == LUA_TNIL) { /* module set no value? */
+ lua_pushboolean(L, 1); /* use true as result */
+ lua_pushvalue(L, -1); /* extra copy to be returned */
+ lua_setfield(L, 2, name); /* LOADED[name] = true */
+ }
+ return 1;
+}
+
+/* }====================================================== */
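
For context, a searcher registered in 'package.searchers' receives the module name and either returns a loader function plus one extra value, or pushes an error-message fragment, exactly as findloader above expects. A minimal sketch of that protocol, using only the public Lua 5.3 API (demo_searcher, demo_loader and the module name "builtin.demo" are illustrative, not part of the patch):

    #include <string.h>
    #include "lua.h"
    #include "lauxlib.h"

    /* loader: receives the module name and the searcher's extra value,
       and leaves the module's value on the stack */
    static int demo_loader (lua_State *L) {
      lua_newtable(L);                      /* an empty module table */
      return 1;
    }

    /* searcher following the protocol used by findloader/ll_require */
    static int demo_searcher (lua_State *L) {
      const char *name = luaL_checkstring(L, 1);
      if (strcmp(name, "builtin.demo") == 0) {
        lua_pushcfunction(L, demo_loader);  /* the loader */
        lua_pushstring(L, name);            /* extra value passed to the loader */
        return 2;
      }
      lua_pushfstring(L, "\n\tno built-in module '%s'", name);
      return 1;                             /* only an error fragment */
    }

Appending such a function to package.searchers would make require("builtin.demo") resolve without touching the file system, with ll_require caching the returned table in LOADED as usual.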
+
+
+
+/*
+** {======================================================
+** 'module' function
+** =======================================================
+*/
+#if defined(LUA_COMPAT_MODULE)
+
+/*
+** changes the environment (first upvalue, _ENV) of the calling function
+*/
+static void set_env (lua_State *L) {
+ lua_Debug ar;
+ if (lua_getstack(L, 1, &ar) == 0 ||
+ lua_getinfo(L, "f", &ar) == 0 || /* get calling function */
+ lua_iscfunction(L, -1))
+ luaL_error(L, "'module' not called from a Lua function");
+ lua_pushvalue(L, -2); /* copy new environment table to top */
+ lua_setupvalue(L, -2, 1);
+ lua_pop(L, 1); /* remove function */
+}
+
+
+static void dooptions (lua_State *L, int n) {
+ int i;
+ for (i = 2; i <= n; i++) {
+ if (lua_isfunction(L, i)) { /* avoid 'calling' extra info. */
+ lua_pushvalue(L, i); /* get option (a function) */
+ lua_pushvalue(L, -2); /* module */
+ lua_call(L, 1, 0);
+ }
+ }
+}
+
+
+static void modinit (lua_State *L, const char *modname) {
+ const char *dot;
+ lua_pushvalue(L, -1);
+ lua_setfield(L, -2, "_M"); /* module._M = module */
+ lua_pushstring(L, modname);
+ lua_setfield(L, -2, "_NAME");
+ dot = strrchr(modname, '.'); /* look for last dot in module name */
+ if (dot == NULL) dot = modname;
+ else dot++;
+ /* set _PACKAGE as package name (full module name minus last part) */
+ lua_pushlstring(L, modname, dot - modname);
+ lua_setfield(L, -2, "_PACKAGE");
+}
+
+
+static int ll_module (lua_State *L) {
+ const char *modname = luaL_checkstring(L, 1);
+ int lastarg = lua_gettop(L); /* last parameter */
+ luaL_pushmodule(L, modname, 1); /* get/create module table */
+ /* check whether table already has a _NAME field */
+ if (lua_getfield(L, -1, "_NAME") != LUA_TNIL)
+ lua_pop(L, 1); /* table is an initialized module */
+ else { /* no; initialize it */
+ lua_pop(L, 1);
+ modinit(L, modname);
+ }
+ lua_pushvalue(L, -1);
+ set_env(L);
+ dooptions(L, lastarg);
+ return 1;
+}
+
+
+static int ll_seeall (lua_State *L) {
+ luaL_checktype(L, 1, LUA_TTABLE);
+ if (!lua_getmetatable(L, 1)) {
+ lua_createtable(L, 0, 1); /* create new metatable */
+ lua_pushvalue(L, -1);
+ lua_setmetatable(L, 1);
+ }
+ lua_pushglobaltable(L);
+ lua_setfield(L, -2, "__index"); /* mt.__index = _G */
+ return 0;
+}
+
+#endif
+/* }====================================================== */
+
+
+
+static const luaL_Reg pk_funcs[] = {
+ {"loadlib", ll_loadlib},
+ {"searchpath", ll_searchpath},
+#if defined(LUA_COMPAT_MODULE)
+ {"seeall", ll_seeall},
+#endif
+ /* placeholders */
+ {"preload", NULL},
+ {"cpath", NULL},
+ {"path", NULL},
+ {"searchers", NULL},
+ {"loaded", NULL},
+ {NULL, NULL}
+};
+
+
+static const luaL_Reg ll_funcs[] = {
+#if defined(LUA_COMPAT_MODULE)
+ {"module", ll_module},
+#endif
+ {"require", ll_require},
+ {NULL, NULL}
+};
+
+
+static void createsearcherstable (lua_State *L) {
+ static const lua_CFunction searchers[] =
+ {searcher_preload, searcher_Lua, searcher_C, searcher_Croot, NULL};
+ int i;
+ /* create 'searchers' table */
+ lua_createtable(L, sizeof(searchers)/sizeof(searchers[0]) - 1, 0);
+ /* fill it with predefined searchers */
+ for (i=0; searchers[i] != NULL; i++) {
+ lua_pushvalue(L, -2); /* set 'package' as upvalue for all searchers */
+ lua_pushcclosure(L, searchers[i], 1);
+ lua_rawseti(L, -2, i+1);
+ }
+#if defined(LUA_COMPAT_LOADERS)
+ lua_pushvalue(L, -1); /* make a copy of 'searchers' table */
+ lua_setfield(L, -3, "loaders"); /* put it in field 'loaders' */
+#endif
+ lua_setfield(L, -2, "searchers"); /* put it in field 'searchers' */
+}
+
+
+/*
+** create table CLIBS to keep track of loaded C libraries,
+** setting a finalizer to close all libraries when closing state.
+*/
+static void createclibstable (lua_State *L) {
+ lua_newtable(L); /* create CLIBS table */
+ lua_createtable(L, 0, 1); /* create metatable for CLIBS */
+ lua_pushcfunction(L, gctm);
+ lua_setfield(L, -2, "__gc"); /* set finalizer for CLIBS table */
+ lua_setmetatable(L, -2);
+ lua_rawsetp(L, LUA_REGISTRYINDEX, &CLIBS); /* set CLIBS table in registry */
+}
+
+
+LUAMOD_API int luaopen_package (lua_State *L) {
+ createclibstable(L);
+ luaL_newlib(L, pk_funcs); /* create 'package' table */
+ createsearcherstable(L);
+ /* set paths */
+ setpath(L, "path", LUA_PATH_VAR, LUA_PATH_DEFAULT);
+ setpath(L, "cpath", LUA_CPATH_VAR, LUA_CPATH_DEFAULT);
+ /* store config information */
+ lua_pushliteral(L, LUA_DIRSEP "\n" LUA_PATH_SEP "\n" LUA_PATH_MARK "\n"
+ LUA_EXEC_DIR "\n" LUA_IGMARK "\n");
+ lua_setfield(L, -2, "config");
+ /* set field 'loaded' */
+ luaL_getsubtable(L, LUA_REGISTRYINDEX, LUA_LOADED_TABLE);
+ lua_setfield(L, -2, "loaded");
+ /* set field 'preload' */
+ luaL_getsubtable(L, LUA_REGISTRYINDEX, LUA_PRELOAD_TABLE);
+ lua_setfield(L, -2, "preload");
+ lua_pushglobaltable(L);
+ lua_pushvalue(L, -2); /* set 'package' as upvalue for next lib */
+ luaL_setfuncs(L, ll_funcs, 1); /* open lib into global table */
+ lua_pop(L, 1); /* pop global table */
+ return 1; /* return 'package' table */
+}
+
diff --git a/lua/src/lobject.c b/lua/src/lobject.c
new file mode 100644
index 000000000..2da76899a
--- /dev/null
+++ b/lua/src/lobject.c
@@ -0,0 +1,521 @@
+/*
+** $Id: lobject.c,v 2.113 2016/12/22 13:08:50 roberto Exp $
+** Some generic functions over Lua objects
+** See Copyright Notice in lua.h
+*/
+
+#define lobject_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <locale.h>
+#include <math.h>
+#include <stdarg.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lctype.h"
+#include "ldebug.h"
+#include "ldo.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "lvm.h"
+
+
+
+LUAI_DDEF const TValue luaO_nilobject_ = {NILCONSTANT};
+
+
+/*
+** converts an integer to a "floating point byte", represented as
+** (eeeeexxx), where the real value is (1xxx) * 2^(eeeee - 1) if
+** eeeee != 0 and (xxx) otherwise.
+*/
+int luaO_int2fb (unsigned int x) {
+ int e = 0; /* exponent */
+ if (x < 8) return x;
+ while (x >= (8 << 4)) { /* coarse steps */
+ x = (x + 0xf) >> 4; /* x = ceil(x / 16) */
+ e += 4;
+ }
+ while (x >= (8 << 1)) { /* fine steps */
+ x = (x + 1) >> 1; /* x = ceil(x / 2) */
+ e++;
+ }
+ return ((e+1) << 3) | (cast_int(x) - 8);
+}
+
+
+/* converts back */
+int luaO_fb2int (int x) {
+ return (x < 8) ? x : ((x & 7) + 8) << ((x >> 3) - 1);
+}
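
The rounding behaviour of this pair is easy to check with a standalone restatement (the *_demo names are local to the sketch, not part of the patch):

    #include <assert.h>

    static int int2fb_demo (unsigned int x) {   /* same encoding as luaO_int2fb */
      int e = 0;
      if (x < 8) return (int)x;
      while (x >= (8 << 4)) { x = (x + 0xf) >> 4; e += 4; }
      while (x >= (8 << 1)) { x = (x + 1) >> 1; e++; }
      return ((e + 1) << 3) | ((int)x - 8);
    }

    static int fb2int_demo (int x) {            /* same decoding as luaO_fb2int */
      return (x < 8) ? x : ((x & 7) + 8) << ((x >> 3) - 1);
    }

    int main (void) {
      assert(int2fb_demo(10) == 10 && fb2int_demo(10) == 10);  /* small values are exact */
      assert(int2fb_demo(1000) == 64);    /* larger values round up ... */
      assert(fb2int_demo(64) == 1024);    /* ... to the next representable size */
      return 0;
    }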
+
+
+/*
+** Computes ceil(log2(x))
+*/
+int luaO_ceillog2 (unsigned int x) {
+ static const lu_byte log_2[256] = { /* log_2[i] = ceil(log2(i - 1)) */
+ 0,1,2,2,3,3,3,3,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+ 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
+ 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
+ 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
+ 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
+ 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
+ 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
+ 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8
+ };
+ int l = 0;
+ x--;
+ while (x >= 256) { l += 8; x >>= 8; }
+ return l + log_2[x];
+}
+
+
+static lua_Integer intarith (lua_State *L, int op, lua_Integer v1,
+ lua_Integer v2) {
+ switch (op) {
+ case LUA_OPADD: return intop(+, v1, v2);
+ case LUA_OPSUB: return intop(-, v1, v2);
+ case LUA_OPMUL: return intop(*, v1, v2);
+ case LUA_OPMOD: return luaV_mod(L, v1, v2);
+ case LUA_OPIDIV: return luaV_div(L, v1, v2);
+ case LUA_OPBAND: return intop(&, v1, v2);
+ case LUA_OPBOR: return intop(|, v1, v2);
+ case LUA_OPBXOR: return intop(^, v1, v2);
+ case LUA_OPSHL: return luaV_shiftl(v1, v2);
+ case LUA_OPSHR: return luaV_shiftl(v1, -v2);
+ case LUA_OPUNM: return intop(-, 0, v1);
+ case LUA_OPBNOT: return intop(^, ~l_castS2U(0), v1);
+ default: lua_assert(0); return 0;
+ }
+}
+
+
+static lua_Number numarith (lua_State *L, int op, lua_Number v1,
+ lua_Number v2) {
+ switch (op) {
+ case LUA_OPADD: return luai_numadd(L, v1, v2);
+ case LUA_OPSUB: return luai_numsub(L, v1, v2);
+ case LUA_OPMUL: return luai_nummul(L, v1, v2);
+ case LUA_OPDIV: return luai_numdiv(L, v1, v2);
+ case LUA_OPPOW: return luai_numpow(L, v1, v2);
+ case LUA_OPIDIV: return luai_numidiv(L, v1, v2);
+ case LUA_OPUNM: return luai_numunm(L, v1);
+ case LUA_OPMOD: {
+ lua_Number m;
+ luai_nummod(L, v1, v2, m);
+ return m;
+ }
+ default: lua_assert(0); return 0;
+ }
+}
+
+
+void luaO_arith (lua_State *L, int op, const TValue *p1, const TValue *p2,
+ TValue *res) {
+ switch (op) {
+ case LUA_OPBAND: case LUA_OPBOR: case LUA_OPBXOR:
+ case LUA_OPSHL: case LUA_OPSHR:
+ case LUA_OPBNOT: { /* operate only on integers */
+ lua_Integer i1; lua_Integer i2;
+ if (tointeger(p1, &i1) && tointeger(p2, &i2)) {
+ setivalue(res, intarith(L, op, i1, i2));
+ return;
+ }
+ else break; /* go to the end */
+ }
+ case LUA_OPDIV: case LUA_OPPOW: { /* operate only on floats */
+ lua_Number n1; lua_Number n2;
+ if (tonumber(p1, &n1) && tonumber(p2, &n2)) {
+ setfltvalue(res, numarith(L, op, n1, n2));
+ return;
+ }
+ else break; /* go to the end */
+ }
+ default: { /* other operations */
+ lua_Number n1; lua_Number n2;
+ if (ttisinteger(p1) && ttisinteger(p2)) {
+ setivalue(res, intarith(L, op, ivalue(p1), ivalue(p2)));
+ return;
+ }
+ else if (tonumber(p1, &n1) && tonumber(p2, &n2)) {
+ setfltvalue(res, numarith(L, op, n1, n2));
+ return;
+ }
+ else break; /* go to the end */
+ }
+ }
+ /* could not perform raw operation; try metamethod */
+ lua_assert(L != NULL); /* should not fail when folding (compile time) */
+ luaT_trybinTM(L, p1, p2, res, cast(TMS, (op - LUA_OPADD) + TM_ADD));
+}
+
+
+int luaO_hexavalue (int c) {
+ if (lisdigit(c)) return c - '0';
+ else return (ltolower(c) - 'a') + 10;
+}
+
+
+static int isneg (const char **s) {
+ if (**s == '-') { (*s)++; return 1; }
+ else if (**s == '+') (*s)++;
+ return 0;
+}
+
+
+
+/*
+** {==================================================================
+** Lua's implementation for 'lua_strx2number'
+** ===================================================================
+*/
+
+#if !defined(lua_strx2number)
+
+/* maximum number of significant digits to read (to avoid overflows
+ even with single floats) */
+#define MAXSIGDIG 30
+
+/*
+** convert a hexadecimal numeric string to a number, following the
+** C99 specification for 'strtod'
+*/
+static lua_Number lua_strx2number (const char *s, char **endptr) {
+ int dot = lua_getlocaledecpoint();
+ lua_Number r = 0.0; /* result (accumulator) */
+ int sigdig = 0; /* number of significant digits */
+ int nosigdig = 0; /* number of non-significant digits */
+ int e = 0; /* exponent correction */
+ int neg; /* 1 if number is negative */
+ int hasdot = 0; /* true after seen a dot */
+ *endptr = cast(char *, s); /* nothing is valid yet */
+ while (lisspace(cast_uchar(*s))) s++; /* skip initial spaces */
+ neg = isneg(&s); /* check sign */
+ if (!(*s == '0' && (*(s + 1) == 'x' || *(s + 1) == 'X'))) /* check '0x' */
+ return 0.0; /* invalid format (no '0x') */
+ for (s += 2; ; s++) { /* skip '0x' and read numeral */
+ if (*s == dot) {
+ if (hasdot) break; /* second dot? stop loop */
+ else hasdot = 1;
+ }
+ else if (lisxdigit(cast_uchar(*s))) {
+ if (sigdig == 0 && *s == '0') /* non-significant digit (zero)? */
+ nosigdig++;
+ else if (++sigdig <= MAXSIGDIG) /* can read it without overflow? */
+ r = (r * cast_num(16.0)) + luaO_hexavalue(*s);
+ else e++; /* too many digits; ignore, but still count for exponent */
+ if (hasdot) e--; /* decimal digit? correct exponent */
+ }
+ else break; /* neither a dot nor a digit */
+ }
+ if (nosigdig + sigdig == 0) /* no digits? */
+ return 0.0; /* invalid format */
+ *endptr = cast(char *, s); /* valid up to here */
+ e *= 4; /* each digit multiplies/divides value by 2^4 */
+ if (*s == 'p' || *s == 'P') { /* exponent part? */
+ int exp1 = 0; /* exponent value */
+ int neg1; /* exponent sign */
+ s++; /* skip 'p' */
+ neg1 = isneg(&s); /* sign */
+ if (!lisdigit(cast_uchar(*s)))
+ return 0.0; /* invalid; must have at least one digit */
+ while (lisdigit(cast_uchar(*s))) /* read exponent */
+ exp1 = exp1 * 10 + *(s++) - '0';
+ if (neg1) exp1 = -exp1;
+ e += exp1;
+ *endptr = cast(char *, s); /* valid up to here */
+ }
+ if (neg) r = -r;
+ return l_mathop(ldexp)(r, e);
+}
+
+#endif
+/* }====================================================== */
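
This fallback is compiled only when the platform does not provide 'lua_strx2number'. On a C99 library the same syntax is accepted by strtod, which gives a quick way to sanity-check an input (illustrative sketch only):

    #include <assert.h>
    #include <stdlib.h>

    int main (void) {
      char *end;
      /* 0x1.8p1 = (1 + 8/16) * 2^1 = 3.0 */
      double d = strtod("0x1.8p1", &end);
      assert(d == 3.0 && *end == '\0');
      return 0;
    }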
+
+
+/* maximum length of a numeral */
+#if !defined (L_MAXLENNUM)
+#define L_MAXLENNUM 200
+#endif
+
+static const char *l_str2dloc (const char *s, lua_Number *result, int mode) {
+ char *endptr;
+ *result = (mode == 'x') ? lua_strx2number(s, &endptr) /* try to convert */
+ : lua_str2number(s, &endptr);
+ if (endptr == s) return NULL; /* nothing recognized? */
+ while (lisspace(cast_uchar(*endptr))) endptr++; /* skip trailing spaces */
+ return (*endptr == '\0') ? endptr : NULL; /* OK if no trailing characters */
+}
+
+
+/*
+** Convert string 's' to a Lua number (put in 'result'). Return NULL
+** on fail or the address of the ending '\0' on success.
+** 'pmode' points to (and 'mode' contains) special things in the string:
+** - 'x'/'X' means a hexadecimal numeral
+** - 'n'/'N' means 'inf' or 'nan' (which should be rejected)
+** - '.' just optimizes the search for the common case (nothing special)
+** This function accepts either the current locale's radix character or a
+** dot as the radix mark. If the conversion fails, it may mean the number
+** has a dot but the locale expects something else. In that case, the code copies 's'
+** to a buffer (because 's' is read-only), changes the dot to the
+** current locale radix mark, and tries to convert again.
+*/
+static const char *l_str2d (const char *s, lua_Number *result) {
+ const char *endptr;
+ const char *pmode = strpbrk(s, ".xXnN");
+ int mode = pmode ? ltolower(cast_uchar(*pmode)) : 0;
+ if (mode == 'n') /* reject 'inf' and 'nan' */
+ return NULL;
+ endptr = l_str2dloc(s, result, mode); /* try to convert */
+ if (endptr == NULL) { /* failed? may be a different locale */
+ char buff[L_MAXLENNUM + 1];
+ const char *pdot = strchr(s, '.');
+ if (strlen(s) > L_MAXLENNUM || pdot == NULL)
+ return NULL; /* string too long or no dot; fail */
+ strcpy(buff, s); /* copy string to buffer */
+ buff[pdot - s] = lua_getlocaledecpoint(); /* correct decimal point */
+ endptr = l_str2dloc(buff, result, mode); /* try again */
+ if (endptr != NULL)
+ endptr = s + (endptr - buff); /* make relative to 's' */
+ }
+ return endptr;
+}
+
+
+#define MAXBY10 cast(lua_Unsigned, LUA_MAXINTEGER / 10)
+#define MAXLASTD cast_int(LUA_MAXINTEGER % 10)
+
+static const char *l_str2int (const char *s, lua_Integer *result) {
+ lua_Unsigned a = 0;
+ int empty = 1;
+ int neg;
+ while (lisspace(cast_uchar(*s))) s++; /* skip initial spaces */
+ neg = isneg(&s);
+ if (s[0] == '0' &&
+ (s[1] == 'x' || s[1] == 'X')) { /* hex? */
+ s += 2; /* skip '0x' */
+ for (; lisxdigit(cast_uchar(*s)); s++) {
+ a = a * 16 + luaO_hexavalue(*s);
+ empty = 0;
+ }
+ }
+ else { /* decimal */
+ for (; lisdigit(cast_uchar(*s)); s++) {
+ int d = *s - '0';
+ if (a >= MAXBY10 && (a > MAXBY10 || d > MAXLASTD + neg)) /* overflow? */
+ return NULL; /* do not accept it (as integer) */
+ a = a * 10 + d;
+ empty = 0;
+ }
+ }
+ while (lisspace(cast_uchar(*s))) s++; /* skip trailing spaces */
+ if (empty || *s != '\0') return NULL; /* something wrong in the numeral */
+ else {
+ *result = l_castU2S((neg) ? 0u - a : a);
+ return s;
+ }
+}
+
+
+size_t luaO_str2num (const char *s, TValue *o) {
+ lua_Integer i; lua_Number n;
+ const char *e;
+ if ((e = l_str2int(s, &i)) != NULL) { /* try as an integer */
+ setivalue(o, i);
+ }
+ else if ((e = l_str2d(s, &n)) != NULL) { /* else try as a float */
+ setfltvalue(o, n);
+ }
+ else
+ return 0; /* conversion failed */
+ return (e - s) + 1; /* success; return string size */
+}
+
+
+int luaO_utf8esc (char *buff, unsigned long x) {
+ int n = 1; /* number of bytes put in buffer (backwards) */
+ lua_assert(x <= 0x10FFFF);
+ if (x < 0x80) /* ascii? */
+ buff[UTF8BUFFSZ - 1] = cast(char, x);
+ else { /* need continuation bytes */
+ unsigned int mfb = 0x3f; /* maximum that fits in first byte */
+ do { /* add continuation bytes */
+ buff[UTF8BUFFSZ - (n++)] = cast(char, 0x80 | (x & 0x3f));
+ x >>= 6; /* remove added bits */
+ mfb >>= 1; /* now there is one less bit available in first byte */
+ } while (x > mfb); /* still needs continuation byte? */
+ buff[UTF8BUFFSZ - n] = cast(char, (~mfb << 1) | x); /* add first byte */
+ }
+ return n;
+}
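
A worked example of the loop above, as a standalone restatement (utf8esc_demo is local to the sketch): U+20AC, the euro sign, needs two continuation bytes and ends up as E2 82 AC at the end of the buffer.

    #include <assert.h>

    #define BUFSZ 8   /* mirrors UTF8BUFFSZ */

    static int utf8esc_demo (char *buff, unsigned long x) {
      int n = 1;
      if (x < 0x80)
        buff[BUFSZ - 1] = (char)x;
      else {
        unsigned int mfb = 0x3f;
        do {
          buff[BUFSZ - (n++)] = (char)(0x80 | (x & 0x3f));
          x >>= 6;
          mfb >>= 1;
        } while (x > mfb);
        buff[BUFSZ - n] = (char)((~mfb << 1) | x);
      }
      return n;
    }

    int main (void) {
      char b[BUFSZ];
      int n = utf8esc_demo(b, 0x20AC);
      assert(n == 3);
      assert((unsigned char)b[BUFSZ - 3] == 0xE2);
      assert((unsigned char)b[BUFSZ - 2] == 0x82);
      assert((unsigned char)b[BUFSZ - 1] == 0xAC);
      return 0;
    }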
+
+
+/* maximum length of the conversion of a number to a string */
+#define MAXNUMBER2STR 50
+
+
+/*
+** Convert a number object to a string
+*/
+void luaO_tostring (lua_State *L, StkId obj) {
+ char buff[MAXNUMBER2STR];
+ size_t len;
+ lua_assert(ttisnumber(obj));
+ if (ttisinteger(obj))
+ len = lua_integer2str(buff, sizeof(buff), ivalue(obj));
+ else {
+ len = lua_number2str(buff, sizeof(buff), fltvalue(obj));
+#if !defined(LUA_COMPAT_FLOATSTRING)
+ if (buff[strspn(buff, "-0123456789")] == '\0') { /* looks like an int? */
+ buff[len++] = lua_getlocaledecpoint();
+ buff[len++] = '0'; /* adds '.0' to result */
+ }
+#endif
+ }
+ setsvalue2s(L, obj, luaS_newlstr(L, buff, len));
+}
+
+
+static void pushstr (lua_State *L, const char *str, size_t l) {
+ setsvalue2s(L, L->top, luaS_newlstr(L, str, l));
+ luaD_inctop(L);
+}
+
+
+/*
+** this function handles only '%d', '%c', '%f', '%p', and '%s'
+** conventional formats, plus Lua-specific '%I' and '%U'
+*/
+const char *luaO_pushvfstring (lua_State *L, const char *fmt, va_list argp) {
+ int n = 0;
+ for (;;) {
+ const char *e = strchr(fmt, '%');
+ if (e == NULL) break;
+ pushstr(L, fmt, e - fmt);
+ switch (*(e+1)) {
+ case 's': { /* zero-terminated string */
+ const char *s = va_arg(argp, char *);
+ if (s == NULL) s = "(null)";
+ pushstr(L, s, strlen(s));
+ break;
+ }
+ case 'c': { /* an 'int' as a character */
+ char buff = cast(char, va_arg(argp, int));
+ if (lisprint(cast_uchar(buff)))
+ pushstr(L, &buff, 1);
+ else /* non-printable character; print its code */
+ luaO_pushfstring(L, "<\\%d>", cast_uchar(buff));
+ break;
+ }
+ case 'd': { /* an 'int' */
+ setivalue(L->top, va_arg(argp, int));
+ goto top2str;
+ }
+ case 'I': { /* a 'lua_Integer' */
+ setivalue(L->top, cast(lua_Integer, va_arg(argp, l_uacInt)));
+ goto top2str;
+ }
+ case 'f': { /* a 'lua_Number' */
+ setfltvalue(L->top, cast_num(va_arg(argp, l_uacNumber)));
+ top2str: /* convert the top element to a string */
+ luaD_inctop(L);
+ luaO_tostring(L, L->top - 1);
+ break;
+ }
+ case 'p': { /* a pointer */
+ char buff[4*sizeof(void *) + 8]; /* should be enough space for a '%p' */
+ int l = l_sprintf(buff, sizeof(buff), "%p", va_arg(argp, void *));
+ pushstr(L, buff, l);
+ break;
+ }
+ case 'U': { /* an 'int' as a UTF-8 sequence */
+ char buff[UTF8BUFFSZ];
+ int l = luaO_utf8esc(buff, cast(long, va_arg(argp, long)));
+ pushstr(L, buff + UTF8BUFFSZ - l, l);
+ break;
+ }
+ case '%': {
+ pushstr(L, "%", 1);
+ break;
+ }
+ default: {
+ luaG_runerror(L, "invalid option '%%%c' to 'lua_pushfstring'",
+ *(e + 1));
+ }
+ }
+ n += 2;
+ fmt = e+2;
+ }
+ luaD_checkstack(L, 1);
+ pushstr(L, fmt, strlen(fmt));
+ if (n > 0) luaV_concat(L, n + 1);
+ return svalue(L->top - 1);
+}
+
+
+const char *luaO_pushfstring (lua_State *L, const char *fmt, ...) {
+ const char *msg;
+ va_list argp;
+ va_start(argp, fmt);
+ msg = luaO_pushvfstring(L, fmt, argp);
+ va_end(argp);
+ return msg;
+}
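
The public lua_pushfstring is built on top of this function and accepts the same specifier set, so typical call sites look like the following sketch (report_item is a hypothetical helper, not part of the patch):

    #include "lua.h"

    static void report_item (lua_State *L, int idx, const char *what) {
      /* lua_pushfstring leaves the formatted string on the stack
         and also returns it */
      lua_pushfstring(L, "item %d is a %s (state %p)", idx, what, (void *)L);
    }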
+
+
+/* number of chars of a literal string without the ending \0 */
+#define LL(x) (sizeof(x)/sizeof(char) - 1)
+
+#define RETS "..."
+#define PRE "[string \""
+#define POS "\"]"
+
+#define addstr(a,b,l) ( memcpy(a,b,(l) * sizeof(char)), a += (l) )
+
+void luaO_chunkid (char *out, const char *source, size_t bufflen) {
+ size_t l = strlen(source);
+ if (*source == '=') { /* 'literal' source */
+ if (l <= bufflen) /* small enough? */
+ memcpy(out, source + 1, l * sizeof(char));
+ else { /* truncate it */
+ addstr(out, source + 1, bufflen - 1);
+ *out = '\0';
+ }
+ }
+ else if (*source == '@') { /* file name */
+ if (l <= bufflen) /* small enough? */
+ memcpy(out, source + 1, l * sizeof(char));
+ else { /* add '...' before rest of name */
+ addstr(out, RETS, LL(RETS));
+ bufflen -= LL(RETS);
+ memcpy(out, source + 1 + l - bufflen, bufflen * sizeof(char));
+ }
+ }
+ else { /* string; format as [string "source"] */
+ const char *nl = strchr(source, '\n'); /* find first new line (if any) */
+ addstr(out, PRE, LL(PRE)); /* add prefix */
+ bufflen -= LL(PRE RETS POS) + 1; /* save space for prefix+suffix+'\0' */
+ if (l < bufflen && nl == NULL) { /* small one-line source? */
+ addstr(out, source, l); /* keep it */
+ }
+ else {
+ if (nl != NULL) l = nl - source; /* stop at first newline */
+ if (l > bufflen) l = bufflen;
+ addstr(out, source, l);
+ addstr(out, RETS, LL(RETS));
+ }
+ memcpy(out, POS, (LL(POS) + 1) * sizeof(char));
+ }
+}
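
The three branches produce the chunk names familiar from Lua error messages; with a sufficiently large buffer the expected shapes are (illustration only):

    /* '=' sources are used verbatim:   "=stdin"       ->  stdin
    ** '@' sources are file names:      "@script.lua"  ->  script.lua
    ** anything else is quoted and truncated at the first newline if needed:
    **                                  "return 1"     ->  [string "return 1"]  */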
+
diff --git a/lua/src/lobject.h b/lua/src/lobject.h
new file mode 100644
index 000000000..3c0422894
--- /dev/null
+++ b/lua/src/lobject.h
@@ -0,0 +1,549 @@
+/*
+** $Id: lobject.h,v 2.117 2016/08/01 19:51:24 roberto Exp $
+** Type definitions for Lua objects
+** See Copyright Notice in lua.h
+*/
+
+
+#ifndef lobject_h
+#define lobject_h
+
+
+#include <stdarg.h>
+
+
+#include "llimits.h"
+#include "lua.h"
+
+
+/*
+** Extra tags for non-values
+*/
+#define LUA_TPROTO LUA_NUMTAGS /* function prototypes */
+#define LUA_TDEADKEY (LUA_NUMTAGS+1) /* removed keys in tables */
+
+/*
+** number of all possible tags (including LUA_TNONE but excluding DEADKEY)
+*/
+#define LUA_TOTALTAGS (LUA_TPROTO + 2)
+
+
+/*
+** tags for Tagged Values have the following use of bits:
+** bits 0-3: actual tag (a LUA_T* value)
+** bits 4-5: variant bits
+** bit 6: whether value is collectable
+*/
+
+
+/*
+** LUA_TFUNCTION variants:
+** 0 - Lua function
+** 1 - light C function
+** 2 - regular C function (closure)
+*/
+
+/* Variant tags for functions */
+#define LUA_TLCL (LUA_TFUNCTION | (0 << 4)) /* Lua closure */
+#define LUA_TLCF (LUA_TFUNCTION | (1 << 4)) /* light C function */
+#define LUA_TCCL (LUA_TFUNCTION | (2 << 4)) /* C closure */
+
+
+/* Variant tags for strings */
+#define LUA_TSHRSTR (LUA_TSTRING | (0 << 4)) /* short strings */
+#define LUA_TLNGSTR (LUA_TSTRING | (1 << 4)) /* long strings */
+
+
+/* Variant tags for numbers */
+#define LUA_TNUMFLT (LUA_TNUMBER | (0 << 4)) /* float numbers */
+#define LUA_TNUMINT (LUA_TNUMBER | (1 << 4)) /* integer numbers */
+
+
+/* Bit mark for collectable types */
+#define BIT_ISCOLLECTABLE (1 << 6)
+
+/* mark a tag as collectable */
+#define ctb(t) ((t) | BIT_ISCOLLECTABLE)
+
+
+/*
+** Common type for all collectable objects
+*/
+typedef struct GCObject GCObject;
+
+
+/*
+** Common Header for all collectable objects (in macro form, to be
+** included in other objects)
+*/
+#define CommonHeader GCObject *next; lu_byte tt; lu_byte marked
+
+
+/*
+** Common type has only the common header
+*/
+struct GCObject {
+ CommonHeader;
+};
+
+
+
+
+/*
+** Tagged Values. This is the basic representation of values in Lua,
+** an actual value plus a tag with its type.
+*/
+
+/*
+** Union of all Lua values
+*/
+typedef union Value {
+ GCObject *gc; /* collectable objects */
+ void *p; /* light userdata */
+ int b; /* booleans */
+ lua_CFunction f; /* light C functions */
+ lua_Integer i; /* integer numbers */
+ lua_Number n; /* float numbers */
+} Value;
+
+
+#define TValuefields Value value_; int tt_
+
+
+typedef struct lua_TValue {
+ TValuefields;
+} TValue;
+
+
+
+/* macro defining a nil value */
+#define NILCONSTANT {NULL}, LUA_TNIL
+
+
+#define val_(o) ((o)->value_)
+
+
+/* raw type tag of a TValue */
+#define rttype(o) ((o)->tt_)
+
+/* tag with no variants (bits 0-3) */
+#define novariant(x) ((x) & 0x0F)
+
+/* type tag of a TValue (bits 0-3 for tags + variant bits 4-5) */
+#define ttype(o) (rttype(o) & 0x3F)
+
+/* type tag of a TValue with no variants (bits 0-3) */
+#define ttnov(o) (novariant(rttype(o)))
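
A worked example of the layout, assuming the Lua 5.3 basic tag values from lua.h (LUA_TNUMBER == 3, LUA_TSTRING == 4); the sketch restates novariant/ttype/ttnov with plain masks:

    #include <assert.h>
    #include "lua.h"

    int main (void) {
      int tnumint = LUA_TNUMBER | (1 << 4);   /* integer variant: 0x13 */
      int tlngstr = LUA_TSTRING | (1 << 4);   /* long-string variant: 0x14 */
      int ctbstr  = tlngstr | (1 << 6);       /* collectable long string: 0x54 */
      assert((tnumint & 0x0F) == LUA_TNUMBER);  /* novariant() strips the variant */
      assert((ctbstr & 0x3F) == tlngstr);       /* ttype() keeps tag + variant bits */
      assert((ctbstr & 0x0F) == LUA_TSTRING);   /* ttnov() keeps only the basic tag */
      return 0;
    }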
+
+
+/* Macros to test type */
+#define checktag(o,t) (rttype(o) == (t))
+#define checktype(o,t) (ttnov(o) == (t))
+#define ttisnumber(o) checktype((o), LUA_TNUMBER)
+#define ttisfloat(o) checktag((o), LUA_TNUMFLT)
+#define ttisinteger(o) checktag((o), LUA_TNUMINT)
+#define ttisnil(o) checktag((o), LUA_TNIL)
+#define ttisboolean(o) checktag((o), LUA_TBOOLEAN)
+#define ttislightuserdata(o) checktag((o), LUA_TLIGHTUSERDATA)
+#define ttisstring(o) checktype((o), LUA_TSTRING)
+#define ttisshrstring(o) checktag((o), ctb(LUA_TSHRSTR))
+#define ttislngstring(o) checktag((o), ctb(LUA_TLNGSTR))
+#define ttistable(o) checktag((o), ctb(LUA_TTABLE))
+#define ttisfunction(o) checktype(o, LUA_TFUNCTION)
+#define ttisclosure(o) ((rttype(o) & 0x1F) == LUA_TFUNCTION)
+#define ttisCclosure(o) checktag((o), ctb(LUA_TCCL))
+#define ttisLclosure(o) checktag((o), ctb(LUA_TLCL))
+#define ttislcf(o) checktag((o), LUA_TLCF)
+#define ttisfulluserdata(o) checktag((o), ctb(LUA_TUSERDATA))
+#define ttisthread(o) checktag((o), ctb(LUA_TTHREAD))
+#define ttisdeadkey(o) checktag((o), LUA_TDEADKEY)
+
+
+/* Macros to access values */
+#define ivalue(o) check_exp(ttisinteger(o), val_(o).i)
+#define fltvalue(o) check_exp(ttisfloat(o), val_(o).n)
+#define nvalue(o) check_exp(ttisnumber(o), \
+ (ttisinteger(o) ? cast_num(ivalue(o)) : fltvalue(o)))
+#define gcvalue(o) check_exp(iscollectable(o), val_(o).gc)
+#define pvalue(o) check_exp(ttislightuserdata(o), val_(o).p)
+#define tsvalue(o) check_exp(ttisstring(o), gco2ts(val_(o).gc))
+#define uvalue(o) check_exp(ttisfulluserdata(o), gco2u(val_(o).gc))
+#define clvalue(o) check_exp(ttisclosure(o), gco2cl(val_(o).gc))
+#define clLvalue(o) check_exp(ttisLclosure(o), gco2lcl(val_(o).gc))
+#define clCvalue(o) check_exp(ttisCclosure(o), gco2ccl(val_(o).gc))
+#define fvalue(o) check_exp(ttislcf(o), val_(o).f)
+#define hvalue(o) check_exp(ttistable(o), gco2t(val_(o).gc))
+#define bvalue(o) check_exp(ttisboolean(o), val_(o).b)
+#define thvalue(o) check_exp(ttisthread(o), gco2th(val_(o).gc))
+/* a dead value may get the 'gc' field, but cannot access its contents */
+#define deadvalue(o) check_exp(ttisdeadkey(o), cast(void *, val_(o).gc))
+
+#define l_isfalse(o) (ttisnil(o) || (ttisboolean(o) && bvalue(o) == 0))
+
+
+#define iscollectable(o) (rttype(o) & BIT_ISCOLLECTABLE)
+
+
+/* Macros for internal tests */
+#define righttt(obj) (ttype(obj) == gcvalue(obj)->tt)
+
+#define checkliveness(L,obj) \
+ lua_longassert(!iscollectable(obj) || \
+ (righttt(obj) && (L == NULL || !isdead(G(L),gcvalue(obj)))))
+
+
+/* Macros to set values */
+#define settt_(o,t) ((o)->tt_=(t))
+
+#define setfltvalue(obj,x) \
+ { TValue *io=(obj); val_(io).n=(x); settt_(io, LUA_TNUMFLT); }
+
+#define chgfltvalue(obj,x) \
+ { TValue *io=(obj); lua_assert(ttisfloat(io)); val_(io).n=(x); }
+
+#define setivalue(obj,x) \
+ { TValue *io=(obj); val_(io).i=(x); settt_(io, LUA_TNUMINT); }
+
+#define chgivalue(obj,x) \
+ { TValue *io=(obj); lua_assert(ttisinteger(io)); val_(io).i=(x); }
+
+#define setnilvalue(obj) settt_(obj, LUA_TNIL)
+
+#define setfvalue(obj,x) \
+ { TValue *io=(obj); val_(io).f=(x); settt_(io, LUA_TLCF); }
+
+#define setpvalue(obj,x) \
+ { TValue *io=(obj); val_(io).p=(x); settt_(io, LUA_TLIGHTUSERDATA); }
+
+#define setbvalue(obj,x) \
+ { TValue *io=(obj); val_(io).b=(x); settt_(io, LUA_TBOOLEAN); }
+
+#define setgcovalue(L,obj,x) \
+ { TValue *io = (obj); GCObject *i_g=(x); \
+ val_(io).gc = i_g; settt_(io, ctb(i_g->tt)); }
+
+#define setsvalue(L,obj,x) \
+ { TValue *io = (obj); TString *x_ = (x); \
+ val_(io).gc = obj2gco(x_); settt_(io, ctb(x_->tt)); \
+ checkliveness(L,io); }
+
+#define setuvalue(L,obj,x) \
+ { TValue *io = (obj); Udata *x_ = (x); \
+ val_(io).gc = obj2gco(x_); settt_(io, ctb(LUA_TUSERDATA)); \
+ checkliveness(L,io); }
+
+#define setthvalue(L,obj,x) \
+ { TValue *io = (obj); lua_State *x_ = (x); \
+ val_(io).gc = obj2gco(x_); settt_(io, ctb(LUA_TTHREAD)); \
+ checkliveness(L,io); }
+
+#define setclLvalue(L,obj,x) \
+ { TValue *io = (obj); LClosure *x_ = (x); \
+ val_(io).gc = obj2gco(x_); settt_(io, ctb(LUA_TLCL)); \
+ checkliveness(L,io); }
+
+#define setclCvalue(L,obj,x) \
+ { TValue *io = (obj); CClosure *x_ = (x); \
+ val_(io).gc = obj2gco(x_); settt_(io, ctb(LUA_TCCL)); \
+ checkliveness(L,io); }
+
+#define sethvalue(L,obj,x) \
+ { TValue *io = (obj); Table *x_ = (x); \
+ val_(io).gc = obj2gco(x_); settt_(io, ctb(LUA_TTABLE)); \
+ checkliveness(L,io); }
+
+#define setdeadvalue(obj) settt_(obj, LUA_TDEADKEY)
+
+
+
+#define setobj(L,obj1,obj2) \
+ { TValue *io1=(obj1); *io1 = *(obj2); \
+ (void)L; checkliveness(L,io1); }
+
+
+/*
+** different types of assignments, according to destination
+*/
+
+/* from stack to (same) stack */
+#define setobjs2s setobj
+/* to stack (not from same stack) */
+#define setobj2s setobj
+#define setsvalue2s setsvalue
+#define sethvalue2s sethvalue
+#define setptvalue2s setptvalue
+/* from table to same table */
+#define setobjt2t setobj
+/* to new object */
+#define setobj2n setobj
+#define setsvalue2n setsvalue
+
+/* to table (define it as an expression to be used in macros) */
+#define setobj2t(L,o1,o2) ((void)L, *(o1)=*(o2), checkliveness(L,(o1)))
+
+
+
+
+/*
+** {======================================================
+** types and prototypes
+** =======================================================
+*/
+
+
+typedef TValue *StkId; /* index to stack elements */
+
+
+
+
+/*
+** Header for string value; string bytes follow the end of this structure
+** (aligned according to 'UTString'; see next).
+*/
+typedef struct TString {
+ CommonHeader;
+ lu_byte extra; /* reserved words for short strings; "has hash" for longs */
+ lu_byte shrlen; /* length for short strings */
+ unsigned int hash;
+ union {
+ size_t lnglen; /* length for long strings */
+ struct TString *hnext; /* linked list for hash table */
+ } u;
+} TString;
+
+
+/*
+** Ensures that address after this type is always fully aligned.
+*/
+typedef union UTString {
+ L_Umaxalign dummy; /* ensures maximum alignment for strings */
+ TString tsv;
+} UTString;
+
+
+/*
+** Get the actual string (array of bytes) from a 'TString'.
+** (Access to 'extra' ensures that value is really a 'TString'.)
+*/
+#define getstr(ts) \
+ check_exp(sizeof((ts)->extra), cast(char *, (ts)) + sizeof(UTString))
+
+
+/* get the actual string (array of bytes) from a Lua value */
+#define svalue(o) getstr(tsvalue(o))
+
+/* get string length from 'TString *s' */
+#define tsslen(s) ((s)->tt == LUA_TSHRSTR ? (s)->shrlen : (s)->u.lnglen)
+
+/* get string length from 'TValue *o' */
+#define vslen(o) tsslen(tsvalue(o))
+
+
+/*
+** Header for userdata; memory area follows the end of this structure
+** (aligned according to 'UUdata'; see next).
+*/
+typedef struct Udata {
+ CommonHeader;
+ lu_byte ttuv_; /* user value's tag */
+ struct Table *metatable;
+ size_t len; /* number of bytes */
+ union Value user_; /* user value */
+} Udata;
+
+
+/*
+** Ensures that address after this type is always fully aligned.
+*/
+typedef union UUdata {
+ L_Umaxalign dummy; /* ensures maximum alignment for 'local' udata */
+ Udata uv;
+} UUdata;
+
+
+/*
+** Get the address of memory block inside 'Udata'.
+** (Access to 'ttuv_' ensures that value is really a 'Udata'.)
+*/
+#define getudatamem(u) \
+ check_exp(sizeof((u)->ttuv_), (cast(char*, (u)) + sizeof(UUdata)))
+
+#define setuservalue(L,u,o) \
+ { const TValue *io=(o); Udata *iu = (u); \
+ iu->user_ = io->value_; iu->ttuv_ = rttype(io); \
+ checkliveness(L,io); }
+
+
+#define getuservalue(L,u,o) \
+ { TValue *io=(o); const Udata *iu = (u); \
+ io->value_ = iu->user_; settt_(io, iu->ttuv_); \
+ checkliveness(L,io); }
+
+
+/*
+** Description of an upvalue for function prototypes
+*/
+typedef struct Upvaldesc {
+ TString *name; /* upvalue name (for debug information) */
+ lu_byte instack; /* whether it is in stack (register) */
+ lu_byte idx; /* index of upvalue (in stack or in outer function's list) */
+} Upvaldesc;
+
+
+/*
+** Description of a local variable for function prototypes
+** (used for debug information)
+*/
+typedef struct LocVar {
+ TString *varname;
+ int startpc; /* first point where variable is active */
+ int endpc; /* first point where variable is dead */
+} LocVar;
+
+
+/*
+** Function Prototypes
+*/
+typedef struct Proto {
+ CommonHeader;
+ lu_byte numparams; /* number of fixed parameters */
+ lu_byte is_vararg;
+ lu_byte maxstacksize; /* number of registers needed by this function */
+ int sizeupvalues; /* size of 'upvalues' */
+ int sizek; /* size of 'k' */
+ int sizecode;
+ int sizelineinfo;
+ int sizep; /* size of 'p' */
+ int sizelocvars;
+ int linedefined; /* debug information */
+ int lastlinedefined; /* debug information */
+ TValue *k; /* constants used by the function */
+ Instruction *code; /* opcodes */
+ struct Proto **p; /* functions defined inside the function */
+ int *lineinfo; /* map from opcodes to source lines (debug information) */
+ LocVar *locvars; /* information about local variables (debug information) */
+ Upvaldesc *upvalues; /* upvalue information */
+ struct LClosure *cache; /* last-created closure with this prototype */
+ TString *source; /* used for debug information */
+ GCObject *gclist;
+} Proto;
+
+
+
+/*
+** Lua Upvalues
+*/
+typedef struct UpVal UpVal;
+
+
+/*
+** Closures
+*/
+
+#define ClosureHeader \
+ CommonHeader; lu_byte nupvalues; GCObject *gclist
+
+typedef struct CClosure {
+ ClosureHeader;
+ lua_CFunction f;
+ TValue upvalue[1]; /* list of upvalues */
+} CClosure;
+
+
+typedef struct LClosure {
+ ClosureHeader;
+ struct Proto *p;
+ UpVal *upvals[1]; /* list of upvalues */
+} LClosure;
+
+
+typedef union Closure {
+ CClosure c;
+ LClosure l;
+} Closure;
+
+
+#define isLfunction(o) ttisLclosure(o)
+
+#define getproto(o) (clLvalue(o)->p)
+
+
+/*
+** Tables
+*/
+
+typedef union TKey {
+ struct {
+ TValuefields;
+ int next; /* for chaining (offset for next node) */
+ } nk;
+ TValue tvk;
+} TKey;
+
+
+/* copy a value into a key without messing up field 'next' */
+#define setnodekey(L,key,obj) \
+ { TKey *k_=(key); const TValue *io_=(obj); \
+ k_->nk.value_ = io_->value_; k_->nk.tt_ = io_->tt_; \
+ (void)L; checkliveness(L,io_); }
+
+
+typedef struct Node {
+ TValue i_val;
+ TKey i_key;
+} Node;
+
+
+typedef struct Table {
+ CommonHeader;
+ lu_byte flags; /* 1<<p means tagmethod(p) is not present */
+ lu_byte lsizenode; /* log2 of size of 'node' array */
+ unsigned int sizearray; /* size of 'array' array */
+ TValue *array; /* array part */
+ Node *node;
+ Node *lastfree; /* any free position is before this position */
+ struct Table *metatable;
+ GCObject *gclist;
+} Table;
+
+
+
+/*
+** 'module' operation for hashing (size is always a power of 2)
+*/
+#define lmod(s,size) \
+ (check_exp((size&(size-1))==0, (cast(int, (s) & ((size)-1)))))
+
+
+#define twoto(x) (1<<(x))
+#define sizenode(t) (twoto((t)->lsizenode))
+
+
+/*
+** (address of) a fixed nil value
+*/
+#define luaO_nilobject (&luaO_nilobject_)
+
+
+LUAI_DDEC const TValue luaO_nilobject_;
+
+/* size of buffer for 'luaO_utf8esc' function */
+#define UTF8BUFFSZ 8
+
+LUAI_FUNC int luaO_int2fb (unsigned int x);
+LUAI_FUNC int luaO_fb2int (int x);
+LUAI_FUNC int luaO_utf8esc (char *buff, unsigned long x);
+LUAI_FUNC int luaO_ceillog2 (unsigned int x);
+LUAI_FUNC void luaO_arith (lua_State *L, int op, const TValue *p1,
+ const TValue *p2, TValue *res);
+LUAI_FUNC size_t luaO_str2num (const char *s, TValue *o);
+LUAI_FUNC int luaO_hexavalue (int c);
+LUAI_FUNC void luaO_tostring (lua_State *L, StkId obj);
+LUAI_FUNC const char *luaO_pushvfstring (lua_State *L, const char *fmt,
+ va_list argp);
+LUAI_FUNC const char *luaO_pushfstring (lua_State *L, const char *fmt, ...);
+LUAI_FUNC void luaO_chunkid (char *out, const char *source, size_t len);
+
+
+#endif
+
diff --git a/lua/src/lopcodes.c b/lua/src/lopcodes.c
new file mode 100644
index 000000000..a1cbef857
--- /dev/null
+++ b/lua/src/lopcodes.c
@@ -0,0 +1,124 @@
+/*
+** $Id: lopcodes.c,v 1.55 2015/01/05 13:48:33 roberto Exp $
+** Opcodes for Lua virtual machine
+** See Copyright Notice in lua.h
+*/
+
+#define lopcodes_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <stddef.h>
+
+#include "lopcodes.h"
+
+
+/* ORDER OP */
+
+LUAI_DDEF const char *const luaP_opnames[NUM_OPCODES+1] = {
+ "MOVE",
+ "LOADK",
+ "LOADKX",
+ "LOADBOOL",
+ "LOADNIL",
+ "GETUPVAL",
+ "GETTABUP",
+ "GETTABLE",
+ "SETTABUP",
+ "SETUPVAL",
+ "SETTABLE",
+ "NEWTABLE",
+ "SELF",
+ "ADD",
+ "SUB",
+ "MUL",
+ "MOD",
+ "POW",
+ "DIV",
+ "IDIV",
+ "BAND",
+ "BOR",
+ "BXOR",
+ "SHL",
+ "SHR",
+ "UNM",
+ "BNOT",
+ "NOT",
+ "LEN",
+ "CONCAT",
+ "JMP",
+ "EQ",
+ "LT",
+ "LE",
+ "TEST",
+ "TESTSET",
+ "CALL",
+ "TAILCALL",
+ "RETURN",
+ "FORLOOP",
+ "FORPREP",
+ "TFORCALL",
+ "TFORLOOP",
+ "SETLIST",
+ "CLOSURE",
+ "VARARG",
+ "EXTRAARG",
+ NULL
+};
+
+
+#define opmode(t,a,b,c,m) (((t)<<7) | ((a)<<6) | ((b)<<4) | ((c)<<2) | (m))
+
+LUAI_DDEF const lu_byte luaP_opmodes[NUM_OPCODES] = {
+/* T A B C mode opcode */
+ opmode(0, 1, OpArgR, OpArgN, iABC) /* OP_MOVE */
+ ,opmode(0, 1, OpArgK, OpArgN, iABx) /* OP_LOADK */
+ ,opmode(0, 1, OpArgN, OpArgN, iABx) /* OP_LOADKX */
+ ,opmode(0, 1, OpArgU, OpArgU, iABC) /* OP_LOADBOOL */
+ ,opmode(0, 1, OpArgU, OpArgN, iABC) /* OP_LOADNIL */
+ ,opmode(0, 1, OpArgU, OpArgN, iABC) /* OP_GETUPVAL */
+ ,opmode(0, 1, OpArgU, OpArgK, iABC) /* OP_GETTABUP */
+ ,opmode(0, 1, OpArgR, OpArgK, iABC) /* OP_GETTABLE */
+ ,opmode(0, 0, OpArgK, OpArgK, iABC) /* OP_SETTABUP */
+ ,opmode(0, 0, OpArgU, OpArgN, iABC) /* OP_SETUPVAL */
+ ,opmode(0, 0, OpArgK, OpArgK, iABC) /* OP_SETTABLE */
+ ,opmode(0, 1, OpArgU, OpArgU, iABC) /* OP_NEWTABLE */
+ ,opmode(0, 1, OpArgR, OpArgK, iABC) /* OP_SELF */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_ADD */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_SUB */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_MUL */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_MOD */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_POW */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_DIV */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_IDIV */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_BAND */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_BOR */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_BXOR */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_SHL */
+ ,opmode(0, 1, OpArgK, OpArgK, iABC) /* OP_SHR */
+ ,opmode(0, 1, OpArgR, OpArgN, iABC) /* OP_UNM */
+ ,opmode(0, 1, OpArgR, OpArgN, iABC) /* OP_BNOT */
+ ,opmode(0, 1, OpArgR, OpArgN, iABC) /* OP_NOT */
+ ,opmode(0, 1, OpArgR, OpArgN, iABC) /* OP_LEN */
+ ,opmode(0, 1, OpArgR, OpArgR, iABC) /* OP_CONCAT */
+ ,opmode(0, 0, OpArgR, OpArgN, iAsBx) /* OP_JMP */
+ ,opmode(1, 0, OpArgK, OpArgK, iABC) /* OP_EQ */
+ ,opmode(1, 0, OpArgK, OpArgK, iABC) /* OP_LT */
+ ,opmode(1, 0, OpArgK, OpArgK, iABC) /* OP_LE */
+ ,opmode(1, 0, OpArgN, OpArgU, iABC) /* OP_TEST */
+ ,opmode(1, 1, OpArgR, OpArgU, iABC) /* OP_TESTSET */
+ ,opmode(0, 1, OpArgU, OpArgU, iABC) /* OP_CALL */
+ ,opmode(0, 1, OpArgU, OpArgU, iABC) /* OP_TAILCALL */
+ ,opmode(0, 0, OpArgU, OpArgN, iABC) /* OP_RETURN */
+ ,opmode(0, 1, OpArgR, OpArgN, iAsBx) /* OP_FORLOOP */
+ ,opmode(0, 1, OpArgR, OpArgN, iAsBx) /* OP_FORPREP */
+ ,opmode(0, 0, OpArgN, OpArgU, iABC) /* OP_TFORCALL */
+ ,opmode(0, 1, OpArgR, OpArgN, iAsBx) /* OP_TFORLOOP */
+ ,opmode(0, 0, OpArgU, OpArgU, iABC) /* OP_SETLIST */
+ ,opmode(0, 1, OpArgU, OpArgN, iABx) /* OP_CLOSURE */
+ ,opmode(0, 1, OpArgU, OpArgN, iABC) /* OP_VARARG */
+ ,opmode(0, 0, OpArgU, OpArgU, iAx) /* OP_EXTRAARG */
+};
+
diff --git a/lua/src/lopcodes.h b/lua/src/lopcodes.h
new file mode 100644
index 000000000..bbc4b6196
--- /dev/null
+++ b/lua/src/lopcodes.h
@@ -0,0 +1,297 @@
+/*
+** $Id: lopcodes.h,v 1.149 2016/07/19 17:12:21 roberto Exp $
+** Opcodes for Lua virtual machine
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lopcodes_h
+#define lopcodes_h
+
+#include "llimits.h"
+
+
+/*===========================================================================
+ We assume that instructions are unsigned numbers.
+ All instructions have an opcode in the first 6 bits.
+ Instructions can have the following fields:
+ 'A' : 8 bits
+ 'B' : 9 bits
+ 'C' : 9 bits
+ 'Ax' : 26 bits ('A', 'B', and 'C' together)
+ 'Bx' : 18 bits ('B' and 'C' together)
+ 'sBx' : signed Bx
+
+ A signed argument is represented in excess K; that is, the number
+ value is the unsigned value minus K. K is exactly the maximum value
+ for that argument (so that -max is represented by 0, and +max is
+ represented by 2*max), which is half the maximum for the corresponding
+ unsigned argument.
+===========================================================================*/
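
Numerically, with SIZE_Bx == 18 the bias K is MAXARG_sBx == 131071; the sketch below restates what SETARG_sBx stores and GETARG_sBx recovers (local names only, for illustration):

    #include <assert.h>

    int main (void) {
      const int maxBx  = (1 << 18) - 1;   /* MAXARG_Bx  == 262143 */
      const int maxsBx = maxBx >> 1;      /* MAXARG_sBx == 131071 == K */
      int sbx = -5;                       /* a signed jump offset */
      int stored = sbx + maxsBx;          /* what SETARG_sBx stores */
      assert(stored == 131066);
      assert(stored - maxsBx == sbx);     /* what GETARG_sBx recovers */
      return 0;
    }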
+
+
+enum OpMode {iABC, iABx, iAsBx, iAx}; /* basic instruction format */
+
+
+/*
+** size and position of opcode arguments.
+*/
+#define SIZE_C 9
+#define SIZE_B 9
+#define SIZE_Bx (SIZE_C + SIZE_B)
+#define SIZE_A 8
+#define SIZE_Ax (SIZE_C + SIZE_B + SIZE_A)
+
+#define SIZE_OP 6
+
+#define POS_OP 0
+#define POS_A (POS_OP + SIZE_OP)
+#define POS_C (POS_A + SIZE_A)
+#define POS_B (POS_C + SIZE_C)
+#define POS_Bx POS_C
+#define POS_Ax POS_A
+
+
+/*
+** limits for opcode arguments.
+** we use (signed) int to manipulate most arguments,
+** so they must fit in LUAI_BITSINT-1 bits (-1 for sign)
+*/
+#if SIZE_Bx < LUAI_BITSINT-1
+#define MAXARG_Bx ((1<<SIZE_Bx)-1)
+#define MAXARG_sBx (MAXARG_Bx>>1) /* 'sBx' is signed */
+#else
+#define MAXARG_Bx MAX_INT
+#define MAXARG_sBx MAX_INT
+#endif
+
+#if SIZE_Ax < LUAI_BITSINT-1
+#define MAXARG_Ax ((1<<SIZE_Ax)-1)
+#else
+#define MAXARG_Ax MAX_INT
+#endif
+
+
+#define MAXARG_A ((1<<SIZE_A)-1)
+#define MAXARG_B ((1<<SIZE_B)-1)
+#define MAXARG_C ((1<<SIZE_C)-1)
+
+
+/* creates a mask with 'n' 1 bits at position 'p' */
+#define MASK1(n,p) ((~((~(Instruction)0)<<(n)))<<(p))
+
+/* creates a mask with 'n' 0 bits at position 'p' */
+#define MASK0(n,p) (~MASK1(n,p))
+
+/*
+** the following macros help to manipulate instructions
+*/
+
+#define GET_OPCODE(i) (cast(OpCode, ((i)>>POS_OP) & MASK1(SIZE_OP,0)))
+#define SET_OPCODE(i,o) ((i) = (((i)&MASK0(SIZE_OP,POS_OP)) | \
+ ((cast(Instruction, o)<<POS_OP)&MASK1(SIZE_OP,POS_OP))))
+
+#define getarg(i,pos,size) (cast(int, ((i)>>pos) & MASK1(size,0)))
+#define setarg(i,v,pos,size) ((i) = (((i)&MASK0(size,pos)) | \
+ ((cast(Instruction, v)<<pos)&MASK1(size,pos))))
+
+#define GETARG_A(i) getarg(i, POS_A, SIZE_A)
+#define SETARG_A(i,v) setarg(i, v, POS_A, SIZE_A)
+
+#define GETARG_B(i) getarg(i, POS_B, SIZE_B)
+#define SETARG_B(i,v) setarg(i, v, POS_B, SIZE_B)
+
+#define GETARG_C(i) getarg(i, POS_C, SIZE_C)
+#define SETARG_C(i,v) setarg(i, v, POS_C, SIZE_C)
+
+#define GETARG_Bx(i) getarg(i, POS_Bx, SIZE_Bx)
+#define SETARG_Bx(i,v) setarg(i, v, POS_Bx, SIZE_Bx)
+
+#define GETARG_Ax(i) getarg(i, POS_Ax, SIZE_Ax)
+#define SETARG_Ax(i,v) setarg(i, v, POS_Ax, SIZE_Ax)
+
+#define GETARG_sBx(i) (GETARG_Bx(i)-MAXARG_sBx)
+#define SETARG_sBx(i,b) SETARG_Bx((i),cast(unsigned int, (b)+MAXARG_sBx))
+
+
+#define CREATE_ABC(o,a,b,c) ((cast(Instruction, o)<<POS_OP) \
+ | (cast(Instruction, a)<<POS_A) \
+ | (cast(Instruction, b)<<POS_B) \
+ | (cast(Instruction, c)<<POS_C))
+
+#define CREATE_ABx(o,a,bc) ((cast(Instruction, o)<<POS_OP) \
+ | (cast(Instruction, a)<<POS_A) \
+ | (cast(Instruction, bc)<<POS_Bx))
+
+#define CREATE_Ax(o,a) ((cast(Instruction, o)<<POS_OP) \
+ | (cast(Instruction, a)<<POS_Ax))
+
+
+/*
+** Macros to operate RK indices
+*/
+
+/* this bit 1 means constant (0 means register) */
+#define BITRK (1 << (SIZE_B - 1))
+
+/* test whether value is a constant */
+#define ISK(x) ((x) & BITRK)
+
+/* gets the index of the constant */
+#define INDEXK(r) ((int)(r) & ~BITRK)
+
+#if !defined(MAXINDEXRK) /* (for debugging only) */
+#define MAXINDEXRK (BITRK - 1)
+#endif
+
+/* code a constant index as a RK value */
+#define RKASK(x) ((x) | BITRK)
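
With SIZE_B == 9 the constant bit is 1 << 8; the following sketch restates the three macros above with local names:

    #include <assert.h>

    int main (void) {
      const int bitrk = 1 << 8;          /* BITRK for SIZE_B == 9 */
      int rk = 3 | bitrk;                /* RKASK(3): constant slot 3 */
      assert(rk & bitrk);                /* ISK: refers to a constant */
      assert((rk & ~bitrk) == 3);        /* INDEXK: recovers the index */
      assert(!(7 & bitrk));              /* a plain value such as 7 is a register */
      return 0;
    }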
+
+
+/*
+** invalid register that fits in 8 bits
+*/
+#define NO_REG MAXARG_A
+
+
+/*
+** R(x) - register
+** Kst(x) - constant (in constant table)
+** RK(x) == if ISK(x) then Kst(INDEXK(x)) else R(x)
+*/
+
+
+/*
+** grep "ORDER OP" if you change these enums
+*/
+
+typedef enum {
+/*----------------------------------------------------------------------
+name args description
+------------------------------------------------------------------------*/
+OP_MOVE,/* A B R(A) := R(B) */
+OP_LOADK,/* A Bx R(A) := Kst(Bx) */
+OP_LOADKX,/* A R(A) := Kst(extra arg) */
+OP_LOADBOOL,/* A B C R(A) := (Bool)B; if (C) pc++ */
+OP_LOADNIL,/* A B R(A), R(A+1), ..., R(A+B) := nil */
+OP_GETUPVAL,/* A B R(A) := UpValue[B] */
+
+OP_GETTABUP,/* A B C R(A) := UpValue[B][RK(C)] */
+OP_GETTABLE,/* A B C R(A) := R(B)[RK(C)] */
+
+OP_SETTABUP,/* A B C UpValue[A][RK(B)] := RK(C) */
+OP_SETUPVAL,/* A B UpValue[B] := R(A) */
+OP_SETTABLE,/* A B C R(A)[RK(B)] := RK(C) */
+
+OP_NEWTABLE,/* A B C R(A) := {} (size = B,C) */
+
+OP_SELF,/* A B C R(A+1) := R(B); R(A) := R(B)[RK(C)] */
+
+OP_ADD,/* A B C R(A) := RK(B) + RK(C) */
+OP_SUB,/* A B C R(A) := RK(B) - RK(C) */
+OP_MUL,/* A B C R(A) := RK(B) * RK(C) */
+OP_MOD,/* A B C R(A) := RK(B) % RK(C) */
+OP_POW,/* A B C R(A) := RK(B) ^ RK(C) */
+OP_DIV,/* A B C R(A) := RK(B) / RK(C) */
+OP_IDIV,/* A B C R(A) := RK(B) // RK(C) */
+OP_BAND,/* A B C R(A) := RK(B) & RK(C) */
+OP_BOR,/* A B C R(A) := RK(B) | RK(C) */
+OP_BXOR,/* A B C R(A) := RK(B) ~ RK(C) */
+OP_SHL,/* A B C R(A) := RK(B) << RK(C) */
+OP_SHR,/* A B C R(A) := RK(B) >> RK(C) */
+OP_UNM,/* A B R(A) := -R(B) */
+OP_BNOT,/* A B R(A) := ~R(B) */
+OP_NOT,/* A B R(A) := not R(B) */
+OP_LEN,/* A B R(A) := length of R(B) */
+
+OP_CONCAT,/* A B C R(A) := R(B).. ... ..R(C) */
+
+OP_JMP,/* A sBx pc+=sBx; if (A) close all upvalues >= R(A - 1) */
+OP_EQ,/* A B C if ((RK(B) == RK(C)) ~= A) then pc++ */
+OP_LT,/* A B C if ((RK(B) < RK(C)) ~= A) then pc++ */
+OP_LE,/* A B C if ((RK(B) <= RK(C)) ~= A) then pc++ */
+
+OP_TEST,/* A C if not (R(A) <=> C) then pc++ */
+OP_TESTSET,/* A B C if (R(B) <=> C) then R(A) := R(B) else pc++ */
+
+OP_CALL,/* A B C R(A), ... ,R(A+C-2) := R(A)(R(A+1), ... ,R(A+B-1)) */
+OP_TAILCALL,/* A B C return R(A)(R(A+1), ... ,R(A+B-1)) */
+OP_RETURN,/* A B return R(A), ... ,R(A+B-2) (see note) */
+
+OP_FORLOOP,/* A sBx R(A)+=R(A+2);
+ if R(A) <?= R(A+1) then { pc+=sBx; R(A+3)=R(A) }*/
+OP_FORPREP,/* A sBx R(A)-=R(A+2); pc+=sBx */
+
+OP_TFORCALL,/* A C R(A+3), ... ,R(A+2+C) := R(A)(R(A+1), R(A+2)); */
+OP_TFORLOOP,/* A sBx if R(A+1) ~= nil then { R(A)=R(A+1); pc += sBx }*/
+
+OP_SETLIST,/* A B C R(A)[(C-1)*FPF+i] := R(A+i), 1 <= i <= B */
+
+OP_CLOSURE,/* A Bx R(A) := closure(KPROTO[Bx]) */
+
+OP_VARARG,/* A B R(A), R(A+1), ..., R(A+B-2) = vararg */
+
+OP_EXTRAARG/* Ax extra (larger) argument for previous opcode */
+} OpCode;
+
+
+#define NUM_OPCODES (cast(int, OP_EXTRAARG) + 1)
+
+
+
+/*===========================================================================
+ Notes:
+ (*) In OP_CALL, if (B == 0) then B = top. If (C == 0), then 'top' is
+ set to last_result+1, so next open instruction (OP_CALL, OP_RETURN,
+ OP_SETLIST) may use 'top'.
+
+ (*) In OP_VARARG, if (B == 0) then use actual number of varargs and
+ set top (like in OP_CALL with C == 0).
+
+ (*) In OP_RETURN, if (B == 0) then return up to 'top'.
+
+ (*) In OP_SETLIST, if (B == 0) then B = 'top'; if (C == 0) then next
+ 'instruction' is EXTRAARG(real C).
+
+ (*) In OP_LOADKX, the next 'instruction' is always EXTRAARG.
+
+ (*) For comparisons, A specifies what condition the test should accept
+ (true or false).
+
+ (*) All 'skips' (pc++) assume that next instruction is a jump.
+
+===========================================================================*/
+
+
+/*
+** masks for instruction properties. The format is:
+** bits 0-1: op mode
+** bits 2-3: C arg mode
+** bits 4-5: B arg mode
+** bit 6: instruction set register A
+** bit 7: operator is a test (next instruction must be a jump)
+*/
+
+enum OpArgMask {
+ OpArgN, /* argument is not used */
+ OpArgU, /* argument is used */
+ OpArgR, /* argument is a register or a jump offset */
+ OpArgK /* argument is a constant or register/constant */
+};
+
+LUAI_DDEC const lu_byte luaP_opmodes[NUM_OPCODES];
+
+#define getOpMode(m) (cast(enum OpMode, luaP_opmodes[m] & 3))
+#define getBMode(m) (cast(enum OpArgMask, (luaP_opmodes[m] >> 4) & 3))
+#define getCMode(m) (cast(enum OpArgMask, (luaP_opmodes[m] >> 2) & 3))
+#define testAMode(m) (luaP_opmodes[m] & (1 << 6))
+#define testTMode(m) (luaP_opmodes[m] & (1 << 7))
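
Decoding the OP_MOVE entry from lopcodes.c above through this layout gives the expected properties (standalone sketch; the *_demo names are local to the example):

    #include <assert.h>

    enum { OpArgN_demo, OpArgU_demo, OpArgR_demo, OpArgK_demo };
    enum { iABC_demo, iABx_demo, iAsBx_demo, iAx_demo };

    #define opmode_demo(t,a,b,c,m) (((t)<<7) | ((a)<<6) | ((b)<<4) | ((c)<<2) | (m))

    int main (void) {
      /* OP_MOVE was built as opmode(0, 1, OpArgR, OpArgN, iABC) */
      unsigned char m = opmode_demo(0, 1, OpArgR_demo, OpArgN_demo, iABC_demo);
      assert((m & 3) == iABC_demo);            /* getOpMode */
      assert(((m >> 4) & 3) == OpArgR_demo);   /* getBMode: B is a register */
      assert(((m >> 2) & 3) == OpArgN_demo);   /* getCMode: C is unused */
      assert(m & (1 << 6));                    /* testAMode: instruction sets R(A) */
      assert(!(m & (1 << 7)));                 /* testTMode: not a test */
      return 0;
    }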
+
+
+LUAI_DDEC const char *const luaP_opnames[NUM_OPCODES+1]; /* opcode names */
+
+
+/* number of list items to accumulate before a SETLIST instruction */
+#define LFIELDS_PER_FLUSH 50
+
+
+#endif
diff --git a/lua/src/loslib.c b/lua/src/loslib.c
new file mode 100644
index 000000000..5a94eb906
--- /dev/null
+++ b/lua/src/loslib.c
@@ -0,0 +1,407 @@
+/*
+** $Id: loslib.c,v 1.65 2016/07/18 17:58:58 roberto Exp $
+** Standard Operating System library
+** See Copyright Notice in lua.h
+*/
+
+#define loslib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <errno.h>
+#include <locale.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+/*
+** {==================================================================
+** List of valid conversion specifiers for the 'strftime' function;
+** options are grouped by length; the two-character group starts with '||'.
+** ===================================================================
+*/
+#if !defined(LUA_STRFTIMEOPTIONS) /* { */
+
+/* options for ANSI C 89 (only 1-char options) */
+#define L_STRFTIMEC89 "aAbBcdHIjmMpSUwWxXyYZ%"
+
+/* options for ISO C 99 and POSIX */
+#define L_STRFTIMEC99 "aAbBcCdDeFgGhHIjmMnprRStTuUVwWxXyYzZ%" \
+ "||" "EcECExEXEyEY" "OdOeOHOIOmOMOSOuOUOVOwOWOy" /* two-char options */
+
+/* options for Windows */
+#define L_STRFTIMEWIN "aAbBcdHIjmMpSUwWxXyYzZ%" \
+ "||" "#c#x#d#H#I#j#m#M#S#U#w#W#y#Y" /* two-char options */
+
+#if defined(LUA_USE_WINDOWS)
+#define LUA_STRFTIMEOPTIONS L_STRFTIMEWIN
+#elif defined(LUA_USE_C89)
+#define LUA_STRFTIMEOPTIONS L_STRFTIMEC89
+#else /* C99 specification */
+#define LUA_STRFTIMEOPTIONS L_STRFTIMEC99
+#endif
+
+#endif /* } */
+/* }================================================================== */
+
+
+/*
+** {==================================================================
+** Configuration for time-related stuff
+** ===================================================================
+*/
+
+#if !defined(l_time_t) /* { */
+/*
+** type to represent time_t in Lua
+*/
+#define l_timet lua_Integer
+#define l_pushtime(L,t) lua_pushinteger(L,(lua_Integer)(t))
+
+static time_t l_checktime (lua_State *L, int arg) {
+ lua_Integer t = luaL_checkinteger(L, arg);
+ luaL_argcheck(L, (time_t)t == t, arg, "time out-of-bounds");
+ return (time_t)t;
+}
+
+#endif /* } */
+
+
+#if !defined(l_gmtime) /* { */
+/*
+** By default, Lua uses gmtime/localtime, except when POSIX is available,
+** where it uses gmtime_r/localtime_r
+*/
+
+#if defined(LUA_USE_POSIX) /* { */
+
+#define l_gmtime(t,r) gmtime_r(t,r)
+#define l_localtime(t,r) localtime_r(t,r)
+
+#else /* }{ */
+
+/* ISO C definitions */
+#define l_gmtime(t,r) ((void)(r)->tm_sec, gmtime(t))
+#define l_localtime(t,r) ((void)(r)->tm_sec, localtime(t))
+
+#endif /* } */
+
+#endif /* } */
+
+/* }================================================================== */
+
+
+/*
+** {==================================================================
+** Configuration for 'tmpnam':
+** By default, Lua uses tmpnam except when POSIX is available, where
+** it uses mkstemp.
+** ===================================================================
+*/
+#if !defined(lua_tmpnam) /* { */
+
+#if defined(LUA_USE_POSIX) /* { */
+
+#include <unistd.h>
+
+#define LUA_TMPNAMBUFSIZE 32
+
+#if !defined(LUA_TMPNAMTEMPLATE)
+#define LUA_TMPNAMTEMPLATE "/tmp/lua_XXXXXX"
+#endif
+
+#define lua_tmpnam(b,e) { \
+ strcpy(b, LUA_TMPNAMTEMPLATE); \
+ e = mkstemp(b); \
+ if (e != -1) close(e); \
+ e = (e == -1); }
+
+#else /* }{ */
+
+/* ISO C definitions */
+#define LUA_TMPNAMBUFSIZE L_tmpnam
+#define lua_tmpnam(b,e) { e = (tmpnam(b) == NULL); }
+
+#endif /* } */
+
+#endif /* } */
+/* }================================================================== */
+
+
+
+
+static int os_execute (lua_State *L) {
+ const char *cmd = luaL_optstring(L, 1, NULL);
+ int stat = system(cmd);
+ if (cmd != NULL)
+ return luaL_execresult(L, stat);
+ else {
+ lua_pushboolean(L, stat); /* true if there is a shell */
+ return 1;
+ }
+}
+
+
+static int os_remove (lua_State *L) {
+ const char *filename = luaL_checkstring(L, 1);
+ return luaL_fileresult(L, remove(filename) == 0, filename);
+}
+
+
+static int os_rename (lua_State *L) {
+ const char *fromname = luaL_checkstring(L, 1);
+ const char *toname = luaL_checkstring(L, 2);
+ return luaL_fileresult(L, rename(fromname, toname) == 0, NULL);
+}
+
+
+static int os_tmpname (lua_State *L) {
+ char buff[LUA_TMPNAMBUFSIZE];
+ int err;
+ lua_tmpnam(buff, err);
+ if (err)
+ return luaL_error(L, "unable to generate a unique filename");
+ lua_pushstring(L, buff);
+ return 1;
+}
+
+
+static int os_getenv (lua_State *L) {
+ lua_pushstring(L, getenv(luaL_checkstring(L, 1))); /* if NULL push nil */
+ return 1;
+}
+
+
+static int os_clock (lua_State *L) {
+ lua_pushnumber(L, ((lua_Number)clock())/(lua_Number)CLOCKS_PER_SEC);
+ return 1;
+}
+
+
+/*
+** {======================================================
+** Time/Date operations
+** { year=%Y, month=%m, day=%d, hour=%H, min=%M, sec=%S,
+** wday=%w+1, yday=%j, isdst=? }
+** =======================================================
+*/
+
+static void setfield (lua_State *L, const char *key, int value) {
+ lua_pushinteger(L, value);
+ lua_setfield(L, -2, key);
+}
+
+static void setboolfield (lua_State *L, const char *key, int value) {
+ if (value < 0) /* undefined? */
+ return; /* does not set field */
+ lua_pushboolean(L, value);
+ lua_setfield(L, -2, key);
+}
+
+
+/*
+** Set all fields from structure 'tm' in the table on top of the stack
+*/
+static void setallfields (lua_State *L, struct tm *stm) {
+ setfield(L, "sec", stm->tm_sec);
+ setfield(L, "min", stm->tm_min);
+ setfield(L, "hour", stm->tm_hour);
+ setfield(L, "day", stm->tm_mday);
+ setfield(L, "month", stm->tm_mon + 1);
+ setfield(L, "year", stm->tm_year + 1900);
+ setfield(L, "wday", stm->tm_wday + 1);
+ setfield(L, "yday", stm->tm_yday + 1);
+ setboolfield(L, "isdst", stm->tm_isdst);
+}
+
+
+static int getboolfield (lua_State *L, const char *key) {
+ int res;
+ res = (lua_getfield(L, -1, key) == LUA_TNIL) ? -1 : lua_toboolean(L, -1);
+ lua_pop(L, 1);
+ return res;
+}
+
+
+/* maximum value for date fields (to avoid arithmetic overflows with 'int') */
+#if !defined(L_MAXDATEFIELD)
+#define L_MAXDATEFIELD (INT_MAX / 2)
+#endif
+
+static int getfield (lua_State *L, const char *key, int d, int delta) {
+ int isnum;
+ int t = lua_getfield(L, -1, key); /* get field and its type */
+ lua_Integer res = lua_tointegerx(L, -1, &isnum);
+ if (!isnum) { /* field is not an integer? */
+ if (t != LUA_TNIL) /* some other value? */
+ return luaL_error(L, "field '%s' is not an integer", key);
+ else if (d < 0) /* absent field; no default? */
+ return luaL_error(L, "field '%s' missing in date table", key);
+ res = d;
+ }
+ else {
+ if (!(-L_MAXDATEFIELD <= res && res <= L_MAXDATEFIELD))
+ return luaL_error(L, "field '%s' is out-of-bound", key);
+ res -= delta;
+ }
+ lua_pop(L, 1);
+ return (int)res;
+}
+
+
+static const char *checkoption (lua_State *L, const char *conv,
+ ptrdiff_t convlen, char *buff) {
+ const char *option = LUA_STRFTIMEOPTIONS;
+ int oplen = 1; /* length of options being checked */
+ for (; *option != '\0' && oplen <= convlen; option += oplen) {
+ if (*option == '|') /* next block? */
+ oplen++; /* will check options with next length (+1) */
+ else if (memcmp(conv, option, oplen) == 0) { /* match? */
+ memcpy(buff, conv, oplen); /* copy valid option to buffer */
+ buff[oplen] = '\0';
+ return conv + oplen; /* return next item */
+ }
+ }
+ luaL_argerror(L, 1,
+ lua_pushfstring(L, "invalid conversion specifier '%%%s'", conv));
+ return conv; /* to avoid warnings */
+}
+
+
+/* maximum size for an individual 'strftime' item */
+#define SIZETIMEFMT 250
+
+
+static int os_date (lua_State *L) {
+ size_t slen;
+ const char *s = luaL_optlstring(L, 1, "%c", &slen);
+ time_t t = luaL_opt(L, l_checktime, 2, time(NULL));
+ const char *se = s + slen; /* 's' end */
+ struct tm tmr, *stm;
+ if (*s == '!') { /* UTC? */
+ stm = l_gmtime(&t, &tmr);
+ s++; /* skip '!' */
+ }
+ else
+ stm = l_localtime(&t, &tmr);
+ if (stm == NULL) /* invalid date? */
+ luaL_error(L, "time result cannot be represented in this installation");
+ if (strcmp(s, "*t") == 0) {
+ lua_createtable(L, 0, 9); /* 9 = number of fields */
+ setallfields(L, stm);
+ }
+ else {
+ char cc[4]; /* buffer for individual conversion specifiers */
+ luaL_Buffer b;
+ cc[0] = '%';
+ luaL_buffinit(L, &b);
+ while (s < se) {
+ if (*s != '%') /* not a conversion specifier? */
+ luaL_addchar(&b, *s++);
+ else {
+ size_t reslen;
+ char *buff = luaL_prepbuffsize(&b, SIZETIMEFMT);
+ s++; /* skip '%' */
+ s = checkoption(L, s, se - s, cc + 1); /* copy specifier to 'cc' */
+ reslen = strftime(buff, SIZETIMEFMT, cc, stm);
+ luaL_addsize(&b, reslen);
+ }
+ }
+ luaL_pushresult(&b);
+ }
+ return 1;
+}
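+
+/*
+** Illustrative usage (a sketch, assuming the standard Lua 5.3 semantics
+** of the 'os.date' implementation above):
+**
+**   os.date("!%Y-%m-%d", 0)   --> "1970-01-01"   ('!' selects UTC)
+**   os.date("*t").month       --> numeric month, built by 'setallfields'
+**
+** Any other format string is processed one '%' specifier at a time:
+** 'checkoption' validates the specifier and 'strftime' formats it.
+*/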
+
+
+static int os_time (lua_State *L) {
+ time_t t;
+ if (lua_isnoneornil(L, 1)) /* called without args? */
+ t = time(NULL); /* get current time */
+ else {
+ struct tm ts;
+ luaL_checktype(L, 1, LUA_TTABLE);
+ lua_settop(L, 1); /* make sure table is at the top */
+ ts.tm_sec = getfield(L, "sec", 0, 0);
+ ts.tm_min = getfield(L, "min", 0, 0);
+ ts.tm_hour = getfield(L, "hour", 12, 0);
+ ts.tm_mday = getfield(L, "day", -1, 0);
+ ts.tm_mon = getfield(L, "month", -1, 1);
+ ts.tm_year = getfield(L, "year", -1, 1900);
+ ts.tm_isdst = getboolfield(L, "isdst");
+ t = mktime(&ts);
+ setallfields(L, &ts); /* update fields with normalized values */
+ }
+ if (t != (time_t)(l_timet)t || t == (time_t)(-1))
+ luaL_error(L, "time result cannot be represented in this installation");
+ l_pushtime(L, t);
+ return 1;
+}
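+
+/*
+** Illustrative usage (a sketch, assuming the standard Lua 5.3 semantics
+** of the 'os.time' implementation above):
+**
+**   os.time{year=2000, month=1, day=1}
+**
+** 'day', 'month' and 'year' have no defaults and must be present;
+** 'hour' defaults to 12 and 'sec'/'min' default to 0 (see 'getfield'),
+** so the call above denotes noon of 2000-01-01 in local time.
+*/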
+
+
+static int os_difftime (lua_State *L) {
+ time_t t1 = l_checktime(L, 1);
+ time_t t2 = l_checktime(L, 2);
+ lua_pushnumber(L, (lua_Number)difftime(t1, t2));
+ return 1;
+}
+
+/* }====================================================== */
+
+
+static int os_setlocale (lua_State *L) {
+ static const int cat[] = {LC_ALL, LC_COLLATE, LC_CTYPE, LC_MONETARY,
+ LC_NUMERIC, LC_TIME};
+ static const char *const catnames[] = {"all", "collate", "ctype", "monetary",
+ "numeric", "time", NULL};
+ const char *l = luaL_optstring(L, 1, NULL);
+ int op = luaL_checkoption(L, 2, "all", catnames);
+ lua_pushstring(L, setlocale(cat[op], l));
+ return 1;
+}
+
+
+static int os_exit (lua_State *L) {
+ int status;
+ if (lua_isboolean(L, 1))
+ status = (lua_toboolean(L, 1) ? EXIT_SUCCESS : EXIT_FAILURE);
+ else
+ status = (int)luaL_optinteger(L, 1, EXIT_SUCCESS);
+ if (lua_toboolean(L, 2))
+ lua_close(L);
+ if (L) exit(status); /* 'if' to avoid warnings for unreachable 'return' */
+ return 0;
+}
+
+
+static const luaL_Reg syslib[] = {
+ {"clock", os_clock},
+ {"date", os_date},
+ {"difftime", os_difftime},
+ {"execute", os_execute},
+ {"exit", os_exit},
+ {"getenv", os_getenv},
+ {"remove", os_remove},
+ {"rename", os_rename},
+ {"setlocale", os_setlocale},
+ {"time", os_time},
+ {"tmpname", os_tmpname},
+ {NULL, NULL}
+};
+
+/* }====================================================== */
+
+
+
+LUAMOD_API int luaopen_os (lua_State *L) {
+ luaL_newlib(L, syslib);
+ return 1;
+}
+
diff --git a/lua/src/lparser.c b/lua/src/lparser.c
new file mode 100644
index 000000000..cd4512d4d
--- /dev/null
+++ b/lua/src/lparser.c
@@ -0,0 +1,1650 @@
+/*
+** $Id: lparser.c,v 2.155 2016/08/01 19:51:24 roberto Exp $
+** Lua Parser
+** See Copyright Notice in lua.h
+*/
+
+#define lparser_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <string.h>
+
+#include "lua.h"
+
+#include "lcode.h"
+#include "ldebug.h"
+#include "ldo.h"
+#include "lfunc.h"
+#include "llex.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lopcodes.h"
+#include "lparser.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+
+
+
+/* maximum number of local variables per function (must be smaller
+ than 250, due to the bytecode format) */
+#define MAXVARS 200
+
+
+#define hasmultret(k) ((k) == VCALL || (k) == VVARARG)
+
+
+/* because all strings are unified by the scanner, the parser
+ can use pointer equality for string equality */
+#define eqstr(a,b) ((a) == (b))
+
+
+/*
+** nodes for block list (list of active blocks)
+*/
+typedef struct BlockCnt {
+ struct BlockCnt *previous; /* chain */
+ int firstlabel; /* index of first label in this block */
+ int firstgoto; /* index of first pending goto in this block */
+ lu_byte nactvar; /* # active locals outside the block */
+ lu_byte upval; /* true if some variable in the block is an upvalue */
+ lu_byte isloop; /* true if 'block' is a loop */
+} BlockCnt;
+
+
+
+/*
+** prototypes for recursive non-terminal functions
+*/
+static void statement (LexState *ls);
+static void expr (LexState *ls, expdesc *v);
+
+
+/* semantic error */
+static l_noret semerror (LexState *ls, const char *msg) {
+ ls->t.token = 0; /* remove "near <token>" from final message */
+ luaX_syntaxerror(ls, msg);
+}
+
+
+static l_noret error_expected (LexState *ls, int token) {
+ luaX_syntaxerror(ls,
+ luaO_pushfstring(ls->L, "%s expected", luaX_token2str(ls, token)));
+}
+
+
+static l_noret errorlimit (FuncState *fs, int limit, const char *what) {
+ lua_State *L = fs->ls->L;
+ const char *msg;
+ int line = fs->f->linedefined;
+ const char *where = (line == 0)
+ ? "main function"
+ : luaO_pushfstring(L, "function at line %d", line);
+ msg = luaO_pushfstring(L, "too many %s (limit is %d) in %s",
+ what, limit, where);
+ luaX_syntaxerror(fs->ls, msg);
+}
+
+
+static void checklimit (FuncState *fs, int v, int l, const char *what) {
+ if (v > l) errorlimit(fs, l, what);
+}
+
+
+static int testnext (LexState *ls, int c) {
+ if (ls->t.token == c) {
+ luaX_next(ls);
+ return 1;
+ }
+ else return 0;
+}
+
+
+static void check (LexState *ls, int c) {
+ if (ls->t.token != c)
+ error_expected(ls, c);
+}
+
+
+static void checknext (LexState *ls, int c) {
+ check(ls, c);
+ luaX_next(ls);
+}
+
+
+#define check_condition(ls,c,msg) { if (!(c)) luaX_syntaxerror(ls, msg); }
+
+
+
+static void check_match (LexState *ls, int what, int who, int where) {
+ if (!testnext(ls, what)) {
+ if (where == ls->linenumber)
+ error_expected(ls, what);
+ else {
+ luaX_syntaxerror(ls, luaO_pushfstring(ls->L,
+ "%s expected (to close %s at line %d)",
+ luaX_token2str(ls, what), luaX_token2str(ls, who), where));
+ }
+ }
+}
+
+
+static TString *str_checkname (LexState *ls) {
+ TString *ts;
+ check(ls, TK_NAME);
+ ts = ls->t.seminfo.ts;
+ luaX_next(ls);
+ return ts;
+}
+
+
+static void init_exp (expdesc *e, expkind k, int i) {
+ e->f = e->t = NO_JUMP;
+ e->k = k;
+ e->u.info = i;
+}
+
+
+static void codestring (LexState *ls, expdesc *e, TString *s) {
+ init_exp(e, VK, luaK_stringK(ls->fs, s));
+}
+
+
+static void checkname (LexState *ls, expdesc *e) {
+ codestring(ls, e, str_checkname(ls));
+}
+
+
+static int registerlocalvar (LexState *ls, TString *varname) {
+ FuncState *fs = ls->fs;
+ Proto *f = fs->f;
+ int oldsize = f->sizelocvars;
+ luaM_growvector(ls->L, f->locvars, fs->nlocvars, f->sizelocvars,
+ LocVar, SHRT_MAX, "local variables");
+ while (oldsize < f->sizelocvars)
+ f->locvars[oldsize++].varname = NULL;
+ f->locvars[fs->nlocvars].varname = varname;
+ luaC_objbarrier(ls->L, f, varname);
+ return fs->nlocvars++;
+}
+
+
+static void new_localvar (LexState *ls, TString *name) {
+ FuncState *fs = ls->fs;
+ Dyndata *dyd = ls->dyd;
+ int reg = registerlocalvar(ls, name);
+ checklimit(fs, dyd->actvar.n + 1 - fs->firstlocal,
+ MAXVARS, "local variables");
+ luaM_growvector(ls->L, dyd->actvar.arr, dyd->actvar.n + 1,
+ dyd->actvar.size, Vardesc, MAX_INT, "local variables");
+ dyd->actvar.arr[dyd->actvar.n++].idx = cast(short, reg);
+}
+
+
+static void new_localvarliteral_ (LexState *ls, const char *name, size_t sz) {
+ new_localvar(ls, luaX_newstring(ls, name, sz));
+}
+
+#define new_localvarliteral(ls,v) \
+ new_localvarliteral_(ls, "" v, (sizeof(v)/sizeof(char))-1)
+
+
+static LocVar *getlocvar (FuncState *fs, int i) {
+ int idx = fs->ls->dyd->actvar.arr[fs->firstlocal + i].idx;
+ lua_assert(idx < fs->nlocvars);
+ return &fs->f->locvars[idx];
+}
+
+
+static void adjustlocalvars (LexState *ls, int nvars) {
+ FuncState *fs = ls->fs;
+ fs->nactvar = cast_byte(fs->nactvar + nvars);
+ for (; nvars; nvars--) {
+ getlocvar(fs, fs->nactvar - nvars)->startpc = fs->pc;
+ }
+}
+
+
+static void removevars (FuncState *fs, int tolevel) {
+ fs->ls->dyd->actvar.n -= (fs->nactvar - tolevel);
+ while (fs->nactvar > tolevel)
+ getlocvar(fs, --fs->nactvar)->endpc = fs->pc;
+}
+
+
+static int searchupvalue (FuncState *fs, TString *name) {
+ int i;
+ Upvaldesc *up = fs->f->upvalues;
+ for (i = 0; i < fs->nups; i++) {
+ if (eqstr(up[i].name, name)) return i;
+ }
+ return -1; /* not found */
+}
+
+
+static int newupvalue (FuncState *fs, TString *name, expdesc *v) {
+ Proto *f = fs->f;
+ int oldsize = f->sizeupvalues;
+ checklimit(fs, fs->nups + 1, MAXUPVAL, "upvalues");
+ luaM_growvector(fs->ls->L, f->upvalues, fs->nups, f->sizeupvalues,
+ Upvaldesc, MAXUPVAL, "upvalues");
+ while (oldsize < f->sizeupvalues)
+ f->upvalues[oldsize++].name = NULL;
+ f->upvalues[fs->nups].instack = (v->k == VLOCAL);
+ f->upvalues[fs->nups].idx = cast_byte(v->u.info);
+ f->upvalues[fs->nups].name = name;
+ luaC_objbarrier(fs->ls->L, f, name);
+ return fs->nups++;
+}
+
+
+static int searchvar (FuncState *fs, TString *n) {
+ int i;
+ for (i = cast_int(fs->nactvar) - 1; i >= 0; i--) {
+ if (eqstr(n, getlocvar(fs, i)->varname))
+ return i;
+ }
+ return -1; /* not found */
+}
+
+
+/*
+ Mark block where variable at given level was defined
+ (to emit close instructions later).
+*/
+static void markupval (FuncState *fs, int level) {
+ BlockCnt *bl = fs->bl;
+ while (bl->nactvar > level)
+ bl = bl->previous;
+ bl->upval = 1;
+}
+
+
+/*
+ Find variable with given name 'n'. If it is an upvalue, add this
+ upvalue into all intermediate functions.
+*/
+static void singlevaraux (FuncState *fs, TString *n, expdesc *var, int base) {
+ if (fs == NULL) /* no more levels? */
+ init_exp(var, VVOID, 0); /* default is global */
+ else {
+ int v = searchvar(fs, n); /* look up locals at current level */
+ if (v >= 0) { /* found? */
+ init_exp(var, VLOCAL, v); /* variable is local */
+ if (!base)
+ markupval(fs, v); /* local will be used as an upval */
+ }
+ else { /* not found as local at current level; try upvalues */
+ int idx = searchupvalue(fs, n); /* try existing upvalues */
+ if (idx < 0) { /* not found? */
+ singlevaraux(fs->prev, n, var, 0); /* try upper levels */
+ if (var->k == VVOID) /* not found? */
+ return; /* it is a global */
+ /* else was LOCAL or UPVAL */
+ idx = newupvalue(fs, n, var); /* will be a new upvalue */
+ }
+ init_exp(var, VUPVAL, idx); /* new or old upvalue */
+ }
+ }
+}
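+
+/*
+** Resolution sketch (assuming standard Lua 5.3 scoping): for a name 'x',
+** a local of the current function yields VLOCAL, a local of an enclosing
+** function yields VUPVAL (the upvalue is added to every intermediate
+** function), and anything else yields VVOID, which 'singlevar' below
+** rewrites as the indexed access _ENV.x.
+*/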
+
+
+static void singlevar (LexState *ls, expdesc *var) {
+ TString *varname = str_checkname(ls);
+ FuncState *fs = ls->fs;
+ singlevaraux(fs, varname, var, 1);
+ if (var->k == VVOID) { /* global name? */
+ expdesc key;
+ singlevaraux(fs, ls->envn, var, 1); /* get environment variable */
+ lua_assert(var->k != VVOID); /* this one must exist */
+ codestring(ls, &key, varname); /* key is variable name */
+ luaK_indexed(fs, var, &key); /* env[varname] */
+ }
+}
+
+
+static void adjust_assign (LexState *ls, int nvars, int nexps, expdesc *e) {
+ FuncState *fs = ls->fs;
+ int extra = nvars - nexps;
+ if (hasmultret(e->k)) {
+ extra++; /* includes call itself */
+ if (extra < 0) extra = 0;
+ luaK_setreturns(fs, e, extra); /* last exp. provides the difference */
+ if (extra > 1) luaK_reserveregs(fs, extra-1);
+ }
+ else {
+ if (e->k != VVOID) luaK_exp2nextreg(fs, e); /* close last expression */
+ if (extra > 0) {
+ int reg = fs->freereg;
+ luaK_reserveregs(fs, extra);
+ luaK_nil(fs, reg, extra);
+ }
+ }
+ if (nexps > nvars)
+ ls->fs->freereg -= nexps - nvars; /* remove extra values */
+}
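+
+/*
+** Adjustment sketch (assuming standard Lua 5.3 multiple assignment):
+** in 'local a, b, c = f()' the call is the last expression, so it is
+** adjusted to produce all three values; in 'local a = 1, 2, 3' the two
+** surplus values are discarded by lowering 'freereg'; in 'local a, b = 1'
+** the missing value is filled with nil via 'luaK_nil'.
+*/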
+
+
+static void enterlevel (LexState *ls) {
+ lua_State *L = ls->L;
+ ++L->nCcalls;
+ checklimit(ls->fs, L->nCcalls, LUAI_MAXCCALLS, "C levels");
+}
+
+
+#define leavelevel(ls) ((ls)->L->nCcalls--)
+
+
+static void closegoto (LexState *ls, int g, Labeldesc *label) {
+ int i;
+ FuncState *fs = ls->fs;
+ Labellist *gl = &ls->dyd->gt;
+ Labeldesc *gt = &gl->arr[g];
+ lua_assert(eqstr(gt->name, label->name));
+ if (gt->nactvar < label->nactvar) {
+ TString *vname = getlocvar(fs, gt->nactvar)->varname;
+ const char *msg = luaO_pushfstring(ls->L,
+ "<goto %s> at line %d jumps into the scope of local '%s'",
+ getstr(gt->name), gt->line, getstr(vname));
+ semerror(ls, msg);
+ }
+ luaK_patchlist(fs, gt->pc, label->pc);
+ /* remove goto from pending list */
+ for (i = g; i < gl->n - 1; i++)
+ gl->arr[i] = gl->arr[i + 1];
+ gl->n--;
+}
+
+
+/*
+** try to close a goto with existing labels; this solves backward jumps
+*/
+static int findlabel (LexState *ls, int g) {
+ int i;
+ BlockCnt *bl = ls->fs->bl;
+ Dyndata *dyd = ls->dyd;
+ Labeldesc *gt = &dyd->gt.arr[g];
+ /* check labels in current block for a match */
+ for (i = bl->firstlabel; i < dyd->label.n; i++) {
+ Labeldesc *lb = &dyd->label.arr[i];
+ if (eqstr(lb->name, gt->name)) { /* correct label? */
+ if (gt->nactvar > lb->nactvar &&
+ (bl->upval || dyd->label.n > bl->firstlabel))
+ luaK_patchclose(ls->fs, gt->pc, lb->nactvar);
+ closegoto(ls, g, lb); /* close it */
+ return 1;
+ }
+ }
+ return 0; /* label not found; cannot close goto */
+}
+
+
+static int newlabelentry (LexState *ls, Labellist *l, TString *name,
+ int line, int pc) {
+ int n = l->n;
+ luaM_growvector(ls->L, l->arr, n, l->size,
+ Labeldesc, SHRT_MAX, "labels/gotos");
+ l->arr[n].name = name;
+ l->arr[n].line = line;
+ l->arr[n].nactvar = ls->fs->nactvar;
+ l->arr[n].pc = pc;
+ l->n = n + 1;
+ return n;
+}
+
+
+/*
+** check whether new label 'lb' matches any pending gotos in current
+** block; solves forward jumps
+*/
+static void findgotos (LexState *ls, Labeldesc *lb) {
+ Labellist *gl = &ls->dyd->gt;
+ int i = ls->fs->bl->firstgoto;
+ while (i < gl->n) {
+ if (eqstr(gl->arr[i].name, lb->name))
+ closegoto(ls, i, lb);
+ else
+ i++;
+ }
+}
+
+
+/*
+** export pending gotos to outer level, to check them against
+** outer labels; if the block being exited has upvalues, and
+** the goto exits the scope of any variable (which can be the
+** upvalue), close those variables being exited.
+*/
+static void movegotosout (FuncState *fs, BlockCnt *bl) {
+ int i = bl->firstgoto;
+ Labellist *gl = &fs->ls->dyd->gt;
+ /* correct pending gotos to current block and try to close them
+ with visible labels */
+ while (i < gl->n) {
+ Labeldesc *gt = &gl->arr[i];
+ if (gt->nactvar > bl->nactvar) {
+ if (bl->upval)
+ luaK_patchclose(fs, gt->pc, bl->nactvar);
+ gt->nactvar = bl->nactvar;
+ }
+ if (!findlabel(fs->ls, i))
+ i++; /* move to next one */
+ }
+}
+
+
+static void enterblock (FuncState *fs, BlockCnt *bl, lu_byte isloop) {
+ bl->isloop = isloop;
+ bl->nactvar = fs->nactvar;
+ bl->firstlabel = fs->ls->dyd->label.n;
+ bl->firstgoto = fs->ls->dyd->gt.n;
+ bl->upval = 0;
+ bl->previous = fs->bl;
+ fs->bl = bl;
+ lua_assert(fs->freereg == fs->nactvar);
+}
+
+
+/*
+** create a label named 'break' to resolve break statements
+*/
+static void breaklabel (LexState *ls) {
+ TString *n = luaS_new(ls->L, "break");
+ int l = newlabelentry(ls, &ls->dyd->label, n, 0, ls->fs->pc);
+ findgotos(ls, &ls->dyd->label.arr[l]);
+}
+
+/*
+** generates an error for an undefined 'goto', choosing an appropriate
+** message when the label name is a reserved word (which can only be 'break')
+*/
+static l_noret undefgoto (LexState *ls, Labeldesc *gt) {
+ const char *msg = isreserved(gt->name)
+ ? "<%s> at line %d not inside a loop"
+ : "no visible label '%s' for <goto> at line %d";
+ msg = luaO_pushfstring(ls->L, msg, getstr(gt->name), gt->line);
+ semerror(ls, msg);
+}
+
+
+static void leaveblock (FuncState *fs) {
+ BlockCnt *bl = fs->bl;
+ LexState *ls = fs->ls;
+ if (bl->previous && bl->upval) {
+ /* create a 'jump to here' to close upvalues */
+ int j = luaK_jump(fs);
+ luaK_patchclose(fs, j, bl->nactvar);
+ luaK_patchtohere(fs, j);
+ }
+ if (bl->isloop)
+ breaklabel(ls); /* close pending breaks */
+ fs->bl = bl->previous;
+ removevars(fs, bl->nactvar);
+ lua_assert(bl->nactvar == fs->nactvar);
+ fs->freereg = fs->nactvar; /* free registers */
+ ls->dyd->label.n = bl->firstlabel; /* remove local labels */
+ if (bl->previous) /* inner block? */
+ movegotosout(fs, bl); /* update pending gotos to outer block */
+ else if (bl->firstgoto < ls->dyd->gt.n) /* pending gotos in outer block? */
+ undefgoto(ls, &ls->dyd->gt.arr[bl->firstgoto]); /* error */
+}
+
+
+/*
+** adds a new prototype into list of prototypes
+*/
+static Proto *addprototype (LexState *ls) {
+ Proto *clp;
+ lua_State *L = ls->L;
+ FuncState *fs = ls->fs;
+ Proto *f = fs->f; /* prototype of current function */
+ if (fs->np >= f->sizep) {
+ int oldsize = f->sizep;
+ luaM_growvector(L, f->p, fs->np, f->sizep, Proto *, MAXARG_Bx, "functions");
+ while (oldsize < f->sizep)
+ f->p[oldsize++] = NULL;
+ }
+ f->p[fs->np++] = clp = luaF_newproto(L);
+ luaC_objbarrier(L, f, clp);
+ return clp;
+}
+
+
+/*
+** codes instruction to create new closure in parent function.
+** The OP_CLOSURE instruction must use the last available register,
+** so that, if it invokes the GC, the GC knows which registers
+** are in use at that time.
+*/
+static void codeclosure (LexState *ls, expdesc *v) {
+ FuncState *fs = ls->fs->prev;
+ init_exp(v, VRELOCABLE, luaK_codeABx(fs, OP_CLOSURE, 0, fs->np - 1));
+ luaK_exp2nextreg(fs, v); /* fix it at the last register */
+}
+
+
+static void open_func (LexState *ls, FuncState *fs, BlockCnt *bl) {
+ Proto *f;
+ fs->prev = ls->fs; /* linked list of funcstates */
+ fs->ls = ls;
+ ls->fs = fs;
+ fs->pc = 0;
+ fs->lasttarget = 0;
+ fs->jpc = NO_JUMP;
+ fs->freereg = 0;
+ fs->nk = 0;
+ fs->np = 0;
+ fs->nups = 0;
+ fs->nlocvars = 0;
+ fs->nactvar = 0;
+ fs->firstlocal = ls->dyd->actvar.n;
+ fs->bl = NULL;
+ f = fs->f;
+ f->source = ls->source;
+ f->maxstacksize = 2; /* registers 0/1 are always valid */
+ enterblock(fs, bl, 0);
+}
+
+
+static void close_func (LexState *ls) {
+ lua_State *L = ls->L;
+ FuncState *fs = ls->fs;
+ Proto *f = fs->f;
+ luaK_ret(fs, 0, 0); /* final return */
+ leaveblock(fs);
+ luaM_reallocvector(L, f->code, f->sizecode, fs->pc, Instruction);
+ f->sizecode = fs->pc;
+ luaM_reallocvector(L, f->lineinfo, f->sizelineinfo, fs->pc, int);
+ f->sizelineinfo = fs->pc;
+ luaM_reallocvector(L, f->k, f->sizek, fs->nk, TValue);
+ f->sizek = fs->nk;
+ luaM_reallocvector(L, f->p, f->sizep, fs->np, Proto *);
+ f->sizep = fs->np;
+ luaM_reallocvector(L, f->locvars, f->sizelocvars, fs->nlocvars, LocVar);
+ f->sizelocvars = fs->nlocvars;
+ luaM_reallocvector(L, f->upvalues, f->sizeupvalues, fs->nups, Upvaldesc);
+ f->sizeupvalues = fs->nups;
+ lua_assert(fs->bl == NULL);
+ ls->fs = fs->prev;
+ luaC_checkGC(L);
+}
+
+
+
+/*============================================================*/
+/* GRAMMAR RULES */
+/*============================================================*/
+
+
+/*
+** check whether current token is in the follow set of a block.
+** 'until' closes syntactical blocks, but does not close scope,
+** so it is handled separately.
+*/
+static int block_follow (LexState *ls, int withuntil) {
+ switch (ls->t.token) {
+ case TK_ELSE: case TK_ELSEIF:
+ case TK_END: case TK_EOS:
+ return 1;
+ case TK_UNTIL: return withuntil;
+ default: return 0;
+ }
+}
+
+
+static void statlist (LexState *ls) {
+ /* statlist -> { stat [';'] } */
+ while (!block_follow(ls, 1)) {
+ if (ls->t.token == TK_RETURN) {
+ statement(ls);
+ return; /* 'return' must be last statement */
+ }
+ statement(ls);
+ }
+}
+
+
+static void fieldsel (LexState *ls, expdesc *v) {
+ /* fieldsel -> ['.' | ':'] NAME */
+ FuncState *fs = ls->fs;
+ expdesc key;
+ luaK_exp2anyregup(fs, v);
+ luaX_next(ls); /* skip the dot or colon */
+ checkname(ls, &key);
+ luaK_indexed(fs, v, &key);
+}
+
+
+static void yindex (LexState *ls, expdesc *v) {
+ /* index -> '[' expr ']' */
+ luaX_next(ls); /* skip the '[' */
+ expr(ls, v);
+ luaK_exp2val(ls->fs, v);
+ checknext(ls, ']');
+}
+
+
+/*
+** {======================================================================
+** Rules for Constructors
+** =======================================================================
+*/
+
+
+struct ConsControl {
+ expdesc v; /* last list item read */
+ expdesc *t; /* table descriptor */
+ int nh; /* total number of 'record' elements */
+ int na; /* total number of array elements */
+ int tostore; /* number of array elements pending to be stored */
+};
+
+
+static void recfield (LexState *ls, struct ConsControl *cc) {
+ /* recfield -> (NAME | '['exp1']') = exp1 */
+ FuncState *fs = ls->fs;
+ int reg = ls->fs->freereg;
+ expdesc key, val;
+ int rkkey;
+ if (ls->t.token == TK_NAME) {
+ checklimit(fs, cc->nh, MAX_INT, "items in a constructor");
+ checkname(ls, &key);
+ }
+ else /* ls->t.token == '[' */
+ yindex(ls, &key);
+ cc->nh++;
+ checknext(ls, '=');
+ rkkey = luaK_exp2RK(fs, &key);
+ expr(ls, &val);
+ luaK_codeABC(fs, OP_SETTABLE, cc->t->u.info, rkkey, luaK_exp2RK(fs, &val));
+ fs->freereg = reg; /* free registers */
+}
+
+
+static void closelistfield (FuncState *fs, struct ConsControl *cc) {
+ if (cc->v.k == VVOID) return; /* there is no list item */
+ luaK_exp2nextreg(fs, &cc->v);
+ cc->v.k = VVOID;
+ if (cc->tostore == LFIELDS_PER_FLUSH) {
+ luaK_setlist(fs, cc->t->u.info, cc->na, cc->tostore); /* flush */
+ cc->tostore = 0; /* no more items pending */
+ }
+}
+
+
+static void lastlistfield (FuncState *fs, struct ConsControl *cc) {
+ if (cc->tostore == 0) return;
+ if (hasmultret(cc->v.k)) {
+ luaK_setmultret(fs, &cc->v);
+ luaK_setlist(fs, cc->t->u.info, cc->na, LUA_MULTRET);
+ cc->na--; /* do not count last expression (unknown number of elements) */
+ }
+ else {
+ if (cc->v.k != VVOID)
+ luaK_exp2nextreg(fs, &cc->v);
+ luaK_setlist(fs, cc->t->u.info, cc->na, cc->tostore);
+ }
+}
+
+
+static void listfield (LexState *ls, struct ConsControl *cc) {
+ /* listfield -> exp */
+ expr(ls, &cc->v);
+ checklimit(ls->fs, cc->na, MAX_INT, "items in a constructor");
+ cc->na++;
+ cc->tostore++;
+}
+
+
+static void field (LexState *ls, struct ConsControl *cc) {
+ /* field -> listfield | recfield */
+ switch(ls->t.token) {
+ case TK_NAME: { /* may be 'listfield' or 'recfield' */
+ if (luaX_lookahead(ls) != '=') /* expression? */
+ listfield(ls, cc);
+ else
+ recfield(ls, cc);
+ break;
+ }
+ case '[': {
+ recfield(ls, cc);
+ break;
+ }
+ default: {
+ listfield(ls, cc);
+ break;
+ }
+ }
+}
+
+
+static void constructor (LexState *ls, expdesc *t) {
+ /* constructor -> '{' [ field { sep field } [sep] ] '}'
+ sep -> ',' | ';' */
+ FuncState *fs = ls->fs;
+ int line = ls->linenumber;
+ int pc = luaK_codeABC(fs, OP_NEWTABLE, 0, 0, 0);
+ struct ConsControl cc;
+ cc.na = cc.nh = cc.tostore = 0;
+ cc.t = t;
+ init_exp(t, VRELOCABLE, pc);
+ init_exp(&cc.v, VVOID, 0); /* no value (yet) */
+ luaK_exp2nextreg(ls->fs, t); /* fix it at stack top */
+ checknext(ls, '{');
+ do {
+ lua_assert(cc.v.k == VVOID || cc.tostore > 0);
+ if (ls->t.token == '}') break;
+ closelistfield(fs, &cc);
+ field(ls, &cc);
+ } while (testnext(ls, ',') || testnext(ls, ';'));
+ check_match(ls, '}', '{', line);
+ lastlistfield(fs, &cc);
+ SETARG_B(fs->f->code[pc], luaO_int2fb(cc.na)); /* set initial array size */
+ SETARG_C(fs->f->code[pc], luaO_int2fb(cc.nh)); /* set initial table size */
+}
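+
+/*
+** Constructor sketch (assuming standard Lua 5.3 table constructors):
+** for '{10, 20, x = 1}' the parser counts cc.na = 2 array items and
+** cc.nh = 1 record item, and patches those counts (encoded with
+** 'luaO_int2fb') into the B and C arguments of the OP_NEWTABLE
+** instruction so the table is created with the right initial sizes.
+*/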
+
+/* }====================================================================== */
+
+
+
+static void parlist (LexState *ls) {
+ /* parlist -> [ param { ',' param } ] */
+ FuncState *fs = ls->fs;
+ Proto *f = fs->f;
+ int nparams = 0;
+ f->is_vararg = 0;
+ if (ls->t.token != ')') { /* is 'parlist' not empty? */
+ do {
+ switch (ls->t.token) {
+ case TK_NAME: { /* param -> NAME */
+ new_localvar(ls, str_checkname(ls));
+ nparams++;
+ break;
+ }
+ case TK_DOTS: { /* param -> '...' */
+ luaX_next(ls);
+ f->is_vararg = 1; /* declared vararg */
+ break;
+ }
+ default: luaX_syntaxerror(ls, "<name> or '...' expected");
+ }
+ } while (!f->is_vararg && testnext(ls, ','));
+ }
+ adjustlocalvars(ls, nparams);
+ f->numparams = cast_byte(fs->nactvar);
+ luaK_reserveregs(fs, fs->nactvar); /* reserve register for parameters */
+}
+
+
+static void body (LexState *ls, expdesc *e, int ismethod, int line) {
+ /* body -> '(' parlist ')' block END */
+ FuncState new_fs;
+ BlockCnt bl;
+ new_fs.f = addprototype(ls);
+ new_fs.f->linedefined = line;
+ open_func(ls, &new_fs, &bl);
+ checknext(ls, '(');
+ if (ismethod) {
+ new_localvarliteral(ls, "self"); /* create 'self' parameter */
+ adjustlocalvars(ls, 1);
+ }
+ parlist(ls);
+ checknext(ls, ')');
+ statlist(ls);
+ new_fs.f->lastlinedefined = ls->linenumber;
+ check_match(ls, TK_END, TK_FUNCTION, line);
+ codeclosure(ls, e);
+ close_func(ls);
+}
+
+
+static int explist (LexState *ls, expdesc *v) {
+ /* explist -> expr { ',' expr } */
+ int n = 1; /* at least one expression */
+ expr(ls, v);
+ while (testnext(ls, ',')) {
+ luaK_exp2nextreg(ls->fs, v);
+ expr(ls, v);
+ n++;
+ }
+ return n;
+}
+
+
+static void funcargs (LexState *ls, expdesc *f, int line) {
+ FuncState *fs = ls->fs;
+ expdesc args;
+ int base, nparams;
+ switch (ls->t.token) {
+ case '(': { /* funcargs -> '(' [ explist ] ')' */
+ luaX_next(ls);
+ if (ls->t.token == ')') /* arg list is empty? */
+ args.k = VVOID;
+ else {
+ explist(ls, &args);
+ luaK_setmultret(fs, &args);
+ }
+ check_match(ls, ')', '(', line);
+ break;
+ }
+ case '{': { /* funcargs -> constructor */
+ constructor(ls, &args);
+ break;
+ }
+ case TK_STRING: { /* funcargs -> STRING */
+ codestring(ls, &args, ls->t.seminfo.ts);
+ luaX_next(ls); /* must use 'seminfo' before 'next' */
+ break;
+ }
+ default: {
+ luaX_syntaxerror(ls, "function arguments expected");
+ }
+ }
+ lua_assert(f->k == VNONRELOC);
+ base = f->u.info; /* base register for call */
+ if (hasmultret(args.k))
+ nparams = LUA_MULTRET; /* open call */
+ else {
+ if (args.k != VVOID)
+ luaK_exp2nextreg(fs, &args); /* close last argument */
+ nparams = fs->freereg - (base+1);
+ }
+ init_exp(f, VCALL, luaK_codeABC(fs, OP_CALL, base, nparams+1, 2));
+ luaK_fixline(fs, line);
+ fs->freereg = base+1; /* call removes function and arguments and leaves
+ (unless changed) one result */
+}
+
+
+
+
+/*
+** {======================================================================
+** Expression parsing
+** =======================================================================
+*/
+
+
+static void primaryexp (LexState *ls, expdesc *v) {
+ /* primaryexp -> NAME | '(' expr ')' */
+ switch (ls->t.token) {
+ case '(': {
+ int line = ls->linenumber;
+ luaX_next(ls);
+ expr(ls, v);
+ check_match(ls, ')', '(', line);
+ luaK_dischargevars(ls->fs, v);
+ return;
+ }
+ case TK_NAME: {
+ singlevar(ls, v);
+ return;
+ }
+ default: {
+ luaX_syntaxerror(ls, "unexpected symbol");
+ }
+ }
+}
+
+
+static void suffixedexp (LexState *ls, expdesc *v) {
+ /* suffixedexp ->
+ primaryexp { '.' NAME | '[' exp ']' | ':' NAME funcargs | funcargs } */
+ FuncState *fs = ls->fs;
+ int line = ls->linenumber;
+ primaryexp(ls, v);
+ for (;;) {
+ switch (ls->t.token) {
+ case '.': { /* fieldsel */
+ fieldsel(ls, v);
+ break;
+ }
+ case '[': { /* '[' exp1 ']' */
+ expdesc key;
+ luaK_exp2anyregup(fs, v);
+ yindex(ls, &key);
+ luaK_indexed(fs, v, &key);
+ break;
+ }
+ case ':': { /* ':' NAME funcargs */
+ expdesc key;
+ luaX_next(ls);
+ checkname(ls, &key);
+ luaK_self(fs, v, &key);
+ funcargs(ls, v, line);
+ break;
+ }
+ case '(': case TK_STRING: case '{': { /* funcargs */
+ luaK_exp2nextreg(fs, v);
+ funcargs(ls, v, line);
+ break;
+ }
+ default: return;
+ }
+ }
+}
+
+
+static void simpleexp (LexState *ls, expdesc *v) {
+ /* simpleexp -> FLT | INT | STRING | NIL | TRUE | FALSE | ... |
+ constructor | FUNCTION body | suffixedexp */
+ switch (ls->t.token) {
+ case TK_FLT: {
+ init_exp(v, VKFLT, 0);
+ v->u.nval = ls->t.seminfo.r;
+ break;
+ }
+ case TK_INT: {
+ init_exp(v, VKINT, 0);
+ v->u.ival = ls->t.seminfo.i;
+ break;
+ }
+ case TK_STRING: {
+ codestring(ls, v, ls->t.seminfo.ts);
+ break;
+ }
+ case TK_NIL: {
+ init_exp(v, VNIL, 0);
+ break;
+ }
+ case TK_TRUE: {
+ init_exp(v, VTRUE, 0);
+ break;
+ }
+ case TK_FALSE: {
+ init_exp(v, VFALSE, 0);
+ break;
+ }
+ case TK_DOTS: { /* vararg */
+ FuncState *fs = ls->fs;
+ check_condition(ls, fs->f->is_vararg,
+ "cannot use '...' outside a vararg function");
+ init_exp(v, VVARARG, luaK_codeABC(fs, OP_VARARG, 0, 1, 0));
+ break;
+ }
+ case '{': { /* constructor */
+ constructor(ls, v);
+ return;
+ }
+ case TK_FUNCTION: {
+ luaX_next(ls);
+ body(ls, v, 0, ls->linenumber);
+ return;
+ }
+ default: {
+ suffixedexp(ls, v);
+ return;
+ }
+ }
+ luaX_next(ls);
+}
+
+
+static UnOpr getunopr (int op) {
+ switch (op) {
+ case TK_NOT: return OPR_NOT;
+ case '-': return OPR_MINUS;
+ case '~': return OPR_BNOT;
+ case '#': return OPR_LEN;
+ default: return OPR_NOUNOPR;
+ }
+}
+
+
+static BinOpr getbinopr (int op) {
+ switch (op) {
+ case '+': return OPR_ADD;
+ case '-': return OPR_SUB;
+ case '*': return OPR_MUL;
+ case '%': return OPR_MOD;
+ case '^': return OPR_POW;
+ case '/': return OPR_DIV;
+ case TK_IDIV: return OPR_IDIV;
+ case '&': return OPR_BAND;
+ case '|': return OPR_BOR;
+ case '~': return OPR_BXOR;
+ case TK_SHL: return OPR_SHL;
+ case TK_SHR: return OPR_SHR;
+ case TK_CONCAT: return OPR_CONCAT;
+ case TK_NE: return OPR_NE;
+ case TK_EQ: return OPR_EQ;
+ case '<': return OPR_LT;
+ case TK_LE: return OPR_LE;
+ case '>': return OPR_GT;
+ case TK_GE: return OPR_GE;
+ case TK_AND: return OPR_AND;
+ case TK_OR: return OPR_OR;
+ default: return OPR_NOBINOPR;
+ }
+}
+
+
+static const struct {
+ lu_byte left; /* left priority for each binary operator */
+ lu_byte right; /* right priority */
+} priority[] = { /* ORDER OPR */
+ {10, 10}, {10, 10}, /* '+' '-' */
+ {11, 11}, {11, 11}, /* '*' '%' */
+ {14, 13}, /* '^' (right associative) */
+ {11, 11}, {11, 11}, /* '/' '//' */
+ {6, 6}, {4, 4}, {5, 5}, /* '&' '|' '~' */
+ {7, 7}, {7, 7}, /* '<<' '>>' */
+ {9, 8}, /* '..' (right associative) */
+ {3, 3}, {3, 3}, {3, 3}, /* ==, <, <= */
+ {3, 3}, {3, 3}, {3, 3}, /* ~=, >, >= */
+ {2, 2}, {1, 1} /* and, or */
+};
+
+#define UNARY_PRIORITY 12 /* priority for unary operators */
+
+
+/*
+** subexpr -> (simpleexp | unop subexpr) { binop subexpr }
+** where 'binop' is any binary operator with a priority higher than 'limit'
+*/
+static BinOpr subexpr (LexState *ls, expdesc *v, int limit) {
+ BinOpr op;
+ UnOpr uop;
+ enterlevel(ls);
+ uop = getunopr(ls->t.token);
+ if (uop != OPR_NOUNOPR) {
+ int line = ls->linenumber;
+ luaX_next(ls);
+ subexpr(ls, v, UNARY_PRIORITY);
+ luaK_prefix(ls->fs, uop, v, line);
+ }
+ else simpleexp(ls, v);
+ /* expand while operators have priorities higher than 'limit' */
+ op = getbinopr(ls->t.token);
+ while (op != OPR_NOBINOPR && priority[op].left > limit) {
+ expdesc v2;
+ BinOpr nextop;
+ int line = ls->linenumber;
+ luaX_next(ls);
+ luaK_infix(ls->fs, op, v);
+ /* read sub-expression with higher priority */
+ nextop = subexpr(ls, &v2, priority[op].right);
+ luaK_posfix(ls->fs, op, v, &v2, line);
+ op = nextop;
+ }
+ leavelevel(ls);
+ return op; /* return first untreated operator */
+}
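+
+/*
+** Precedence sketch (assuming the 'priority' table above): parsing
+** 'a + b * c' reads 'a', sees '+' (left 10 > limit 0) and recurses with
+** limit 10; inside, '*' (left 11 > 10) binds first, so the result is
+** 'a + (b * c)'. Because '..' and '^' have a right priority lower than
+** their left priority, 'a .. b .. c' groups as 'a .. (b .. c)', and
+** '-x ^ 2' parses as '-(x ^ 2)' since UNARY_PRIORITY (12) < 14.
+*/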
+
+
+static void expr (LexState *ls, expdesc *v) {
+ subexpr(ls, v, 0);
+}
+
+/* }==================================================================== */
+
+
+
+/*
+** {======================================================================
+** Rules for Statements
+** =======================================================================
+*/
+
+
+static void block (LexState *ls) {
+ /* block -> statlist */
+ FuncState *fs = ls->fs;
+ BlockCnt bl;
+ enterblock(fs, &bl, 0);
+ statlist(ls);
+ leaveblock(fs);
+}
+
+
+/*
+** structure to chain all variables in the left-hand side of an
+** assignment
+*/
+struct LHS_assign {
+ struct LHS_assign *prev;
+ expdesc v; /* variable (global, local, upvalue, or indexed) */
+};
+
+
+/*
+** check whether, in an assignment to an upvalue/local variable, the
+** upvalue/local variable is being used in a previous assignment to a
+** table. If so, save original upvalue/local value in a safe place and
+** use this safe copy in the previous assignment.
+*/
+static void check_conflict (LexState *ls, struct LHS_assign *lh, expdesc *v) {
+ FuncState *fs = ls->fs;
+ int extra = fs->freereg; /* eventual position to save local variable */
+ int conflict = 0;
+ for (; lh; lh = lh->prev) { /* check all previous assignments */
+ if (lh->v.k == VINDEXED) { /* assigning to a table? */
+ /* table is the upvalue/local being assigned now? */
+ if (lh->v.u.ind.vt == v->k && lh->v.u.ind.t == v->u.info) {
+ conflict = 1;
+ lh->v.u.ind.vt = VLOCAL;
+ lh->v.u.ind.t = extra; /* previous assignment will use safe copy */
+ }
+ /* index is the local being assigned? (index cannot be upvalue) */
+ if (v->k == VLOCAL && lh->v.u.ind.idx == v->u.info) {
+ conflict = 1;
+ lh->v.u.ind.idx = extra; /* previous assignment will use safe copy */
+ }
+ }
+ }
+ if (conflict) {
+ /* copy upvalue/local value to a temporary (in position 'extra') */
+ OpCode op = (v->k == VLOCAL) ? OP_MOVE : OP_GETUPVAL;
+ luaK_codeABC(fs, op, extra, v->u.info, 0);
+ luaK_reserveregs(fs, 1);
+ }
+}
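+
+/*
+** Conflict sketch (assuming standard Lua 5.3 multiple assignment): in
+** 'local a = {}; a.x, a = 1, nil' the table indexed on the left is also
+** the local being assigned, so its current value is copied to a scratch
+** register and the indexed assignment uses that copy; 'a.x = 1'
+** therefore still writes into the original table.
+*/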
+
+
+static void assignment (LexState *ls, struct LHS_assign *lh, int nvars) {
+ expdesc e;
+ check_condition(ls, vkisvar(lh->v.k), "syntax error");
+ if (testnext(ls, ',')) { /* assignment -> ',' suffixedexp assignment */
+ struct LHS_assign nv;
+ nv.prev = lh;
+ suffixedexp(ls, &nv.v);
+ if (nv.v.k != VINDEXED)
+ check_conflict(ls, lh, &nv.v);
+ checklimit(ls->fs, nvars + ls->L->nCcalls, LUAI_MAXCCALLS,
+ "C levels");
+ assignment(ls, &nv, nvars+1);
+ }
+ else { /* assignment -> '=' explist */
+ int nexps;
+ checknext(ls, '=');
+ nexps = explist(ls, &e);
+ if (nexps != nvars)
+ adjust_assign(ls, nvars, nexps, &e);
+ else {
+ luaK_setoneret(ls->fs, &e); /* close last expression */
+ luaK_storevar(ls->fs, &lh->v, &e);
+ return; /* avoid default */
+ }
+ }
+ init_exp(&e, VNONRELOC, ls->fs->freereg-1); /* default assignment */
+ luaK_storevar(ls->fs, &lh->v, &e);
+}
+
+
+static int cond (LexState *ls) {
+ /* cond -> exp */
+ expdesc v;
+ expr(ls, &v); /* read condition */
+ if (v.k == VNIL) v.k = VFALSE; /* 'falses' are all equal here */
+ luaK_goiftrue(ls->fs, &v);
+ return v.f;
+}
+
+
+static void gotostat (LexState *ls, int pc) {
+ int line = ls->linenumber;
+ TString *label;
+ int g;
+ if (testnext(ls, TK_GOTO))
+ label = str_checkname(ls);
+ else {
+ luaX_next(ls); /* skip break */
+ label = luaS_new(ls->L, "break");
+ }
+ g = newlabelentry(ls, &ls->dyd->gt, label, line, pc);
+ findlabel(ls, g); /* close it if label already defined */
+}
+
+
+/* check for repeated labels in the same block */
+static void checkrepeated (FuncState *fs, Labellist *ll, TString *label) {
+ int i;
+ for (i = fs->bl->firstlabel; i < ll->n; i++) {
+ if (eqstr(label, ll->arr[i].name)) {
+ const char *msg = luaO_pushfstring(fs->ls->L,
+ "label '%s' already defined on line %d",
+ getstr(label), ll->arr[i].line);
+ semerror(fs->ls, msg);
+ }
+ }
+}
+
+
+/* skip no-op statements */
+static void skipnoopstat (LexState *ls) {
+ while (ls->t.token == ';' || ls->t.token == TK_DBCOLON)
+ statement(ls);
+}
+
+
+static void labelstat (LexState *ls, TString *label, int line) {
+ /* label -> '::' NAME '::' */
+ FuncState *fs = ls->fs;
+ Labellist *ll = &ls->dyd->label;
+ int l; /* index of new label being created */
+ checkrepeated(fs, ll, label); /* check for repeated labels */
+ checknext(ls, TK_DBCOLON); /* skip double colon */
+ /* create new entry for this label */
+ l = newlabelentry(ls, ll, label, line, luaK_getlabel(fs));
+ skipnoopstat(ls); /* skip other no-op statements */
+ if (block_follow(ls, 0)) { /* label is last no-op statement in the block? */
+ /* assume that locals are already out of scope */
+ ll->arr[l].nactvar = fs->bl->nactvar;
+ }
+ findgotos(ls, &ll->arr[l]);
+}
+
+
+static void whilestat (LexState *ls, int line) {
+ /* whilestat -> WHILE cond DO block END */
+ FuncState *fs = ls->fs;
+ int whileinit;
+ int condexit;
+ BlockCnt bl;
+ luaX_next(ls); /* skip WHILE */
+ whileinit = luaK_getlabel(fs);
+ condexit = cond(ls);
+ enterblock(fs, &bl, 1);
+ checknext(ls, TK_DO);
+ block(ls);
+ luaK_jumpto(fs, whileinit);
+ check_match(ls, TK_END, TK_WHILE, line);
+ leaveblock(fs);
+ luaK_patchtohere(fs, condexit); /* false conditions finish the loop */
+}
+
+
+static void repeatstat (LexState *ls, int line) {
+ /* repeatstat -> REPEAT block UNTIL cond */
+ int condexit;
+ FuncState *fs = ls->fs;
+ int repeat_init = luaK_getlabel(fs);
+ BlockCnt bl1, bl2;
+ enterblock(fs, &bl1, 1); /* loop block */
+ enterblock(fs, &bl2, 0); /* scope block */
+ luaX_next(ls); /* skip REPEAT */
+ statlist(ls);
+ check_match(ls, TK_UNTIL, TK_REPEAT, line);
+ condexit = cond(ls); /* read condition (inside scope block) */
+ if (bl2.upval) /* upvalues? */
+ luaK_patchclose(fs, condexit, bl2.nactvar);
+ leaveblock(fs); /* finish scope */
+ luaK_patchlist(fs, condexit, repeat_init); /* close the loop */
+ leaveblock(fs); /* finish loop */
+}
+
+
+static int exp1 (LexState *ls) {
+ expdesc e;
+ int reg;
+ expr(ls, &e);
+ luaK_exp2nextreg(ls->fs, &e);
+ lua_assert(e.k == VNONRELOC);
+ reg = e.u.info;
+ return reg;
+}
+
+
+static void forbody (LexState *ls, int base, int line, int nvars, int isnum) {
+ /* forbody -> DO block */
+ BlockCnt bl;
+ FuncState *fs = ls->fs;
+ int prep, endfor;
+ adjustlocalvars(ls, 3); /* control variables */
+ checknext(ls, TK_DO);
+ prep = isnum ? luaK_codeAsBx(fs, OP_FORPREP, base, NO_JUMP) : luaK_jump(fs);
+ enterblock(fs, &bl, 0); /* scope for declared variables */
+ adjustlocalvars(ls, nvars);
+ luaK_reserveregs(fs, nvars);
+ block(ls);
+ leaveblock(fs); /* end of scope for declared variables */
+ luaK_patchtohere(fs, prep);
+ if (isnum) /* numeric for? */
+ endfor = luaK_codeAsBx(fs, OP_FORLOOP, base, NO_JUMP);
+ else { /* generic for */
+ luaK_codeABC(fs, OP_TFORCALL, base, 0, nvars);
+ luaK_fixline(fs, line);
+ endfor = luaK_codeAsBx(fs, OP_TFORLOOP, base + 2, NO_JUMP);
+ }
+ luaK_patchlist(fs, endfor, prep + 1);
+ luaK_fixline(fs, line);
+}
+
+
+static void fornum (LexState *ls, TString *varname, int line) {
+ /* fornum -> NAME = exp1,exp1[,exp1] forbody */
+ FuncState *fs = ls->fs;
+ int base = fs->freereg;
+ new_localvarliteral(ls, "(for index)");
+ new_localvarliteral(ls, "(for limit)");
+ new_localvarliteral(ls, "(for step)");
+ new_localvar(ls, varname);
+ checknext(ls, '=');
+ exp1(ls); /* initial value */
+ checknext(ls, ',');
+ exp1(ls); /* limit */
+ if (testnext(ls, ','))
+ exp1(ls); /* optional step */
+ else { /* default step = 1 */
+ luaK_codek(fs, fs->freereg, luaK_intK(fs, 1));
+ luaK_reserveregs(fs, 1);
+ }
+ forbody(ls, base, line, 1, 1);
+}
+
+
+static void forlist (LexState *ls, TString *indexname) {
+ /* forlist -> NAME {,NAME} IN explist forbody */
+ FuncState *fs = ls->fs;
+ expdesc e;
+ int nvars = 4; /* gen, state, control, plus at least one declared var */
+ int line;
+ int base = fs->freereg;
+ /* create control variables */
+ new_localvarliteral(ls, "(for generator)");
+ new_localvarliteral(ls, "(for state)");
+ new_localvarliteral(ls, "(for control)");
+ /* create declared variables */
+ new_localvar(ls, indexname);
+ while (testnext(ls, ',')) {
+ new_localvar(ls, str_checkname(ls));
+ nvars++;
+ }
+ checknext(ls, TK_IN);
+ line = ls->linenumber;
+ adjust_assign(ls, 3, explist(ls, &e), &e);
+ luaK_checkstack(fs, 3); /* extra space to call generator */
+ forbody(ls, base, line, nvars - 3, 0);
+}
+
+
+static void forstat (LexState *ls, int line) {
+ /* forstat -> FOR (fornum | forlist) END */
+ FuncState *fs = ls->fs;
+ TString *varname;
+ BlockCnt bl;
+ enterblock(fs, &bl, 1); /* scope for loop and control variables */
+ luaX_next(ls); /* skip 'for' */
+ varname = str_checkname(ls); /* first variable name */
+ switch (ls->t.token) {
+ case '=': fornum(ls, varname, line); break;
+ case ',': case TK_IN: forlist(ls, varname); break;
+ default: luaX_syntaxerror(ls, "'=' or 'in' expected");
+ }
+ check_match(ls, TK_END, TK_FOR, line);
+ leaveblock(fs); /* loop scope ('break' jumps to this point) */
+}
+
+
+static void test_then_block (LexState *ls, int *escapelist) {
+ /* test_then_block -> [IF | ELSEIF] cond THEN block */
+ BlockCnt bl;
+ FuncState *fs = ls->fs;
+ expdesc v;
+ int jf; /* instruction to skip 'then' code (if condition is false) */
+ luaX_next(ls); /* skip IF or ELSEIF */
+ expr(ls, &v); /* read condition */
+ checknext(ls, TK_THEN);
+ if (ls->t.token == TK_GOTO || ls->t.token == TK_BREAK) {
+ luaK_goiffalse(ls->fs, &v); /* will jump to label if condition is true */
+ enterblock(fs, &bl, 0); /* must enter block before 'goto' */
+ gotostat(ls, v.t); /* handle goto/break */
+ skipnoopstat(ls); /* skip other no-op statements */
+ if (block_follow(ls, 0)) { /* 'goto' is the entire block? */
+ leaveblock(fs);
+ return; /* and that is it */
+ }
+ else /* must skip over 'then' part if condition is false */
+ jf = luaK_jump(fs);
+ }
+ else { /* regular case (not goto/break) */
+ luaK_goiftrue(ls->fs, &v); /* skip over block if condition is false */
+ enterblock(fs, &bl, 0);
+ jf = v.f;
+ }
+ statlist(ls); /* 'then' part */
+ leaveblock(fs);
+ if (ls->t.token == TK_ELSE ||
+ ls->t.token == TK_ELSEIF) /* followed by 'else'/'elseif'? */
+ luaK_concat(fs, escapelist, luaK_jump(fs)); /* must jump over it */
+ luaK_patchtohere(fs, jf);
+}
+
+
+static void ifstat (LexState *ls, int line) {
+ /* ifstat -> IF cond THEN block {ELSEIF cond THEN block} [ELSE block] END */
+ FuncState *fs = ls->fs;
+ int escapelist = NO_JUMP; /* exit list for finished parts */
+ test_then_block(ls, &escapelist); /* IF cond THEN block */
+ while (ls->t.token == TK_ELSEIF)
+ test_then_block(ls, &escapelist); /* ELSEIF cond THEN block */
+ if (testnext(ls, TK_ELSE))
+ block(ls); /* 'else' part */
+ check_match(ls, TK_END, TK_IF, line);
+ luaK_patchtohere(fs, escapelist); /* patch escape list to 'if' end */
+}
+
+
+static void localfunc (LexState *ls) {
+ expdesc b;
+ FuncState *fs = ls->fs;
+ new_localvar(ls, str_checkname(ls)); /* new local variable */
+ adjustlocalvars(ls, 1); /* enter its scope */
+ body(ls, &b, 0, ls->linenumber); /* function created in next register */
+ /* debug information will only see the variable after this point! */
+ getlocvar(fs, b.u.info)->startpc = fs->pc;
+}
+
+
+static void localstat (LexState *ls) {
+ /* stat -> LOCAL NAME {',' NAME} ['=' explist] */
+ int nvars = 0;
+ int nexps;
+ expdesc e;
+ do {
+ new_localvar(ls, str_checkname(ls));
+ nvars++;
+ } while (testnext(ls, ','));
+ if (testnext(ls, '='))
+ nexps = explist(ls, &e);
+ else {
+ e.k = VVOID;
+ nexps = 0;
+ }
+ adjust_assign(ls, nvars, nexps, &e);
+ adjustlocalvars(ls, nvars);
+}
+
+
+static int funcname (LexState *ls, expdesc *v) {
+ /* funcname -> NAME {fieldsel} [':' NAME] */
+ int ismethod = 0;
+ singlevar(ls, v);
+ while (ls->t.token == '.')
+ fieldsel(ls, v);
+ if (ls->t.token == ':') {
+ ismethod = 1;
+ fieldsel(ls, v);
+ }
+ return ismethod;
+}
+
+
+static void funcstat (LexState *ls, int line) {
+ /* funcstat -> FUNCTION funcname body */
+ int ismethod;
+ expdesc v, b;
+ luaX_next(ls); /* skip FUNCTION */
+ ismethod = funcname(ls, &v);
+ body(ls, &b, ismethod, line);
+ luaK_storevar(ls->fs, &v, &b);
+ luaK_fixline(ls->fs, line); /* definition "happens" in the first line */
+}
+
+
+static void exprstat (LexState *ls) {
+ /* stat -> func | assignment */
+ FuncState *fs = ls->fs;
+ struct LHS_assign v;
+ suffixedexp(ls, &v.v);
+ if (ls->t.token == '=' || ls->t.token == ',') { /* stat -> assignment ? */
+ v.prev = NULL;
+ assignment(ls, &v, 1);
+ }
+ else { /* stat -> func */
+ check_condition(ls, v.v.k == VCALL, "syntax error");
+ SETARG_C(getinstruction(fs, &v.v), 1); /* call statement uses no results */
+ }
+}
+
+
+static void retstat (LexState *ls) {
+ /* stat -> RETURN [explist] [';'] */
+ FuncState *fs = ls->fs;
+ expdesc e;
+ int first, nret; /* registers with returned values */
+ if (block_follow(ls, 1) || ls->t.token == ';')
+ first = nret = 0; /* return no values */
+ else {
+ nret = explist(ls, &e); /* optional return values */
+ if (hasmultret(e.k)) {
+ luaK_setmultret(fs, &e);
+ if (e.k == VCALL && nret == 1) { /* tail call? */
+ SET_OPCODE(getinstruction(fs,&e), OP_TAILCALL);
+ lua_assert(GETARG_A(getinstruction(fs,&e)) == fs->nactvar);
+ }
+ first = fs->nactvar;
+ nret = LUA_MULTRET; /* return all values */
+ }
+ else {
+ if (nret == 1) /* only one single value? */
+ first = luaK_exp2anyreg(fs, &e);
+ else {
+ luaK_exp2nextreg(fs, &e); /* values must go to the stack */
+ first = fs->nactvar; /* return all active values */
+ lua_assert(nret == fs->freereg - first);
+ }
+ }
+ }
+ luaK_ret(fs, first, nret);
+ testnext(ls, ';'); /* skip optional semicolon */
+}
+
+
+static void statement (LexState *ls) {
+ int line = ls->linenumber; /* may be needed for error messages */
+ enterlevel(ls);
+ switch (ls->t.token) {
+ case ';': { /* stat -> ';' (empty statement) */
+ luaX_next(ls); /* skip ';' */
+ break;
+ }
+ case TK_IF: { /* stat -> ifstat */
+ ifstat(ls, line);
+ break;
+ }
+ case TK_WHILE: { /* stat -> whilestat */
+ whilestat(ls, line);
+ break;
+ }
+ case TK_DO: { /* stat -> DO block END */
+ luaX_next(ls); /* skip DO */
+ block(ls);
+ check_match(ls, TK_END, TK_DO, line);
+ break;
+ }
+ case TK_FOR: { /* stat -> forstat */
+ forstat(ls, line);
+ break;
+ }
+ case TK_REPEAT: { /* stat -> repeatstat */
+ repeatstat(ls, line);
+ break;
+ }
+ case TK_FUNCTION: { /* stat -> funcstat */
+ funcstat(ls, line);
+ break;
+ }
+ case TK_LOCAL: { /* stat -> localstat */
+ luaX_next(ls); /* skip LOCAL */
+ if (testnext(ls, TK_FUNCTION)) /* local function? */
+ localfunc(ls);
+ else
+ localstat(ls);
+ break;
+ }
+ case TK_DBCOLON: { /* stat -> label */
+ luaX_next(ls); /* skip double colon */
+ labelstat(ls, str_checkname(ls), line);
+ break;
+ }
+ case TK_RETURN: { /* stat -> retstat */
+ luaX_next(ls); /* skip RETURN */
+ retstat(ls);
+ break;
+ }
+ case TK_BREAK: /* stat -> breakstat */
+ case TK_GOTO: { /* stat -> 'goto' NAME */
+ gotostat(ls, luaK_jump(ls->fs));
+ break;
+ }
+ default: { /* stat -> func | assignment */
+ exprstat(ls);
+ break;
+ }
+ }
+ lua_assert(ls->fs->f->maxstacksize >= ls->fs->freereg &&
+ ls->fs->freereg >= ls->fs->nactvar);
+ ls->fs->freereg = ls->fs->nactvar; /* free registers */
+ leavelevel(ls);
+}
+
+/* }====================================================================== */
+
+
+/*
+** compiles the main function, which is a regular vararg function with an
+** upvalue named LUA_ENV
+*/
+static void mainfunc (LexState *ls, FuncState *fs) {
+ BlockCnt bl;
+ expdesc v;
+ open_func(ls, fs, &bl);
+ fs->f->is_vararg = 1; /* main function is always declared vararg */
+ init_exp(&v, VLOCAL, 0); /* create and... */
+ newupvalue(fs, ls->envn, &v); /* ...set environment upvalue */
+ luaX_next(ls); /* read first token */
+ statlist(ls); /* parse main body */
+ check(ls, TK_EOS);
+ close_func(ls);
+}
+
+
+LClosure *luaY_parser (lua_State *L, ZIO *z, Mbuffer *buff,
+ Dyndata *dyd, const char *name, int firstchar) {
+ LexState lexstate;
+ FuncState funcstate;
+ LClosure *cl = luaF_newLclosure(L, 1); /* create main closure */
+ setclLvalue(L, L->top, cl); /* anchor it (to avoid being collected) */
+ luaD_inctop(L);
+ lexstate.h = luaH_new(L); /* create table for scanner */
+ sethvalue(L, L->top, lexstate.h); /* anchor it */
+ luaD_inctop(L);
+ funcstate.f = cl->p = luaF_newproto(L);
+ funcstate.f->source = luaS_new(L, name); /* create and anchor TString */
+ lua_assert(iswhite(funcstate.f)); /* do not need barrier here */
+ lexstate.buff = buff;
+ lexstate.dyd = dyd;
+ dyd->actvar.n = dyd->gt.n = dyd->label.n = 0;
+ luaX_setinput(L, &lexstate, z, funcstate.f->source, firstchar);
+ mainfunc(&lexstate, &funcstate);
+ lua_assert(!funcstate.prev && funcstate.nups == 1 && !lexstate.fs);
+ /* all scopes should be correctly finished */
+ lua_assert(dyd->actvar.n == 0 && dyd->gt.n == 0 && dyd->label.n == 0);
+ L->top--; /* remove scanner's table */
+ return cl; /* closure is on the stack, too */
+}
+
diff --git a/lua/src/lparser.h b/lua/src/lparser.h
new file mode 100644
index 000000000..02e9b03ae
--- /dev/null
+++ b/lua/src/lparser.h
@@ -0,0 +1,133 @@
+/*
+** $Id: lparser.h,v 1.76 2015/12/30 18:16:13 roberto Exp $
+** Lua Parser
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lparser_h
+#define lparser_h
+
+#include "llimits.h"
+#include "lobject.h"
+#include "lzio.h"
+
+
+/*
+** Expression and variable descriptor.
+** Code generation for variables and expressions can be delayed to allow
+** optimizations; an 'expdesc' structure describes a potentially-delayed
+** variable/expression. It has a description of its "main" value plus a
+** list of conditional jumps that can also produce its value (generated
+** by short-circuit operators 'and'/'or').
+*/
+
+/* kinds of variables/expressions */
+typedef enum {
+ VVOID, /* when 'expdesc' describes the last expression in a list,
+ this kind means an empty list (so, no expression) */
+ VNIL, /* constant nil */
+ VTRUE, /* constant true */
+ VFALSE, /* constant false */
+ VK, /* constant in 'k'; info = index of constant in 'k' */
+ VKFLT, /* floating constant; nval = numerical float value */
+ VKINT, /* integer constant; nval = numerical integer value */
+ VNONRELOC, /* expression has its value in a fixed register;
+ info = result register */
+ VLOCAL, /* local variable; info = local register */
+ VUPVAL, /* upvalue variable; info = index of upvalue in 'upvalues' */
+ VINDEXED, /* indexed variable;
+ ind.vt = whether 't' is register or upvalue;
+ ind.t = table register or upvalue;
+ ind.idx = key's R/K index */
+ VJMP, /* expression is a test/comparison;
+ info = pc of corresponding jump instruction */
+ VRELOCABLE, /* expression can put result in any register;
+ info = instruction pc */
+ VCALL, /* expression is a function call; info = instruction pc */
+ VVARARG /* vararg expression; info = instruction pc */
+} expkind;
+
+
+#define vkisvar(k) (VLOCAL <= (k) && (k) <= VINDEXED)
+#define vkisinreg(k) ((k) == VNONRELOC || (k) == VLOCAL)
+
+typedef struct expdesc {
+ expkind k;
+ union {
+ lua_Integer ival; /* for VKINT */
+ lua_Number nval; /* for VKFLT */
+ int info; /* for generic use */
+ struct { /* for indexed variables (VINDEXED) */
+ short idx; /* index (R/K) */
+ lu_byte t; /* table (register or upvalue) */
+ lu_byte vt; /* whether 't' is register (VLOCAL) or upvalue (VUPVAL) */
+ } ind;
+ } u;
+ int t; /* patch list of 'exit when true' */
+ int f; /* patch list of 'exit when false' */
+} expdesc;
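+
+/*
+** Kind sketch (assuming standard Lua 5.3 compilation): while parsing,
+** the literal 1 is VKINT, a string literal is VK, a local 'x' is VLOCAL,
+** a global 'x' becomes VINDEXED on the _ENV upvalue, 'f()' is VCALL and
+** '...' is VVARARG; 't' and 'f' accumulate the jump lists produced by
+** comparisons and by 'and'/'or'.
+*/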
+
+
+/* description of active local variable */
+typedef struct Vardesc {
+ short idx; /* variable index in stack */
+} Vardesc;
+
+
+/* description of pending goto statements and label statements */
+typedef struct Labeldesc {
+ TString *name; /* label identifier */
+ int pc; /* position in code */
+ int line; /* line where it appeared */
+ lu_byte nactvar; /* local level where it appears in current block */
+} Labeldesc;
+
+
+/* list of labels or gotos */
+typedef struct Labellist {
+ Labeldesc *arr; /* array */
+ int n; /* number of entries in use */
+ int size; /* array size */
+} Labellist;
+
+
+/* dynamic structures used by the parser */
+typedef struct Dyndata {
+ struct { /* list of active local variables */
+ Vardesc *arr;
+ int n;
+ int size;
+ } actvar;
+ Labellist gt; /* list of pending gotos */
+ Labellist label; /* list of active labels */
+} Dyndata;
+
+
+/* control of blocks */
+struct BlockCnt; /* defined in lparser.c */
+
+
+/* state needed to generate code for a given function */
+typedef struct FuncState {
+ Proto *f; /* current function header */
+ struct FuncState *prev; /* enclosing function */
+ struct LexState *ls; /* lexical state */
+ struct BlockCnt *bl; /* chain of current blocks */
+ int pc; /* next position to code (equivalent to 'ncode') */
+ int lasttarget; /* 'label' of last 'jump label' */
+ int jpc; /* list of pending jumps to 'pc' */
+ int nk; /* number of elements in 'k' */
+ int np; /* number of elements in 'p' */
+ int firstlocal; /* index of first local var (in Dyndata array) */
+ short nlocvars; /* number of elements in 'f->locvars' */
+ lu_byte nactvar; /* number of active local variables */
+ lu_byte nups; /* number of upvalues */
+ lu_byte freereg; /* first free register */
+} FuncState;
+
+
+LUAI_FUNC LClosure *luaY_parser (lua_State *L, ZIO *z, Mbuffer *buff,
+ Dyndata *dyd, const char *name, int firstchar);
+
+
+#endif
diff --git a/lua/src/lpcap.c b/lua/src/lpcap.c
new file mode 100644
index 000000000..c9085de06
--- /dev/null
+++ b/lua/src/lpcap.c
@@ -0,0 +1,537 @@
+/*
+** $Id: lpcap.c,v 1.6 2015/06/15 16:09:57 roberto Exp $
+** Copyright 2007, Lua.org & PUC-Rio (see 'lpeg.html' for license)
+*/
+
+#include "lua.h"
+#include "lauxlib.h"
+
+#include "lpcap.h"
+#include "lptypes.h"
+
+
+#define captype(cap) ((cap)->kind)
+
+#define isclosecap(cap) (captype(cap) == Cclose)
+
+#define closeaddr(c) ((c)->s + (c)->siz - 1)
+
+#define isfullcap(cap) ((cap)->siz != 0)
+
+#define getfromktable(cs,v) lua_rawgeti((cs)->L, ktableidx((cs)->ptop), v)
+
+#define pushluaval(cs) getfromktable(cs, (cs)->cap->idx)
+
+
+
+/*
+** Put at the cache for Lua values the value indexed by 'v' in ktable
+** of the running pattern (if it is not there yet); returns its index.
+*/
+static int updatecache (CapState *cs, int v) {
+ int idx = cs->ptop + 1; /* stack index of cache for Lua values */
+ if (v != cs->valuecached) { /* not there? */
+ getfromktable(cs, v); /* get value from 'ktable' */
+ lua_replace(cs->L, idx); /* put it at reserved stack position */
+ cs->valuecached = v; /* keep track of what is there */
+ }
+ return idx;
+}
+
+
+static int pushcapture (CapState *cs);
+
+
+/*
+** Goes back in a list of captures looking for an open capture
+** corresponding to a close
+*/
+static Capture *findopen (Capture *cap) {
+ int n = 0; /* number of closes waiting an open */
+ for (;;) {
+ cap--;
+ if (isclosecap(cap)) n++; /* one more open to skip */
+ else if (!isfullcap(cap))
+ if (n-- == 0) return cap;
+ }
+}
+
+
+/*
+** Go to the next capture
+*/
+static void nextcap (CapState *cs) {
+ Capture *cap = cs->cap;
+ if (!isfullcap(cap)) { /* not a single capture? */
+ int n = 0; /* number of opens waiting a close */
+ for (;;) { /* look for corresponding close */
+ cap++;
+ if (isclosecap(cap)) {
+ if (n-- == 0) break;
+ }
+ else if (!isfullcap(cap)) n++;
+ }
+ }
+ cs->cap = cap + 1; /* + 1 to skip last close (or entire single capture) */
+}
+
+
+/*
+** Push on the Lua stack all values generated by nested captures inside
+** the current capture. Returns number of values pushed. 'addextra'
+** makes it push the entire match after all captured values. The
+** entire match is pushed also if there are no other nested values,
+** so the function never returns zero.
+*/
+static int pushnestedvalues (CapState *cs, int addextra) {
+ Capture *co = cs->cap;
+ if (isfullcap(cs->cap++)) { /* no nested captures? */
+ lua_pushlstring(cs->L, co->s, co->siz - 1); /* push whole match */
+ return 1; /* that is it */
+ }
+ else {
+ int n = 0;
+ while (!isclosecap(cs->cap)) /* repeat for all nested patterns */
+ n += pushcapture(cs);
+ if (addextra || n == 0) { /* need extra? */
+ lua_pushlstring(cs->L, co->s, cs->cap->s - co->s); /* push whole match */
+ n++;
+ }
+ cs->cap++; /* skip close entry */
+ return n;
+ }
+}
+
+
+/*
+** Push only the first value generated by nested captures
+*/
+static void pushonenestedvalue (CapState *cs) {
+ int n = pushnestedvalues(cs, 0);
+ if (n > 1)
+ lua_pop(cs->L, n - 1); /* pop extra values */
+}
+
+
+/*
+** Try to find a named group capture with the name given at the top of
+** the stack; goes backward from 'cap'.
+*/
+static Capture *findback (CapState *cs, Capture *cap) {
+ lua_State *L = cs->L;
+ while (cap-- > cs->ocap) { /* repeat until end of list */
+ if (isclosecap(cap))
+ cap = findopen(cap); /* skip nested captures */
+ else if (!isfullcap(cap))
+ continue; /* opening an enclosing capture: skip and get previous */
+ if (captype(cap) == Cgroup) {
+ getfromktable(cs, cap->idx); /* get group name */
+ if (lp_equal(L, -2, -1)) { /* right group? */
+ lua_pop(L, 2); /* remove reference name and group name */
+ return cap;
+ }
+ else lua_pop(L, 1); /* remove group name */
+ }
+ }
+ luaL_error(L, "back reference '%s' not found", lua_tostring(L, -1));
+ return NULL; /* to avoid warnings */
+}
+
+
+/*
+** Back-reference capture. Return number of values pushed.
+*/
+static int backrefcap (CapState *cs) {
+ int n;
+ Capture *curr = cs->cap;
+ pushluaval(cs); /* reference name */
+ cs->cap = findback(cs, curr); /* find corresponding group */
+ n = pushnestedvalues(cs, 0); /* push group's values */
+ cs->cap = curr + 1;
+ return n;
+}
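+
+/*
+** Lua-level sketch of what the back-reference machinery above
+** implements (assuming the standard lpeg API, lpeg.Cg/lpeg.Cb): a
+** named group capture stores values that a later back capture
+** retrieves by name:
+**   local lpeg = require("lpeg")
+**   local p = lpeg.Cg(lpeg.C(lpeg.R("az")^1), "word") * "," * lpeg.Cb("word")
+**   print(p:match("abc,xyz"))   --> abc
+*/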
+
+
+/*
+** Table capture: creates a new table and populates it with nested
+** captures.
+*/
+static int tablecap (CapState *cs) {
+ lua_State *L = cs->L;
+ int n = 0;
+ lua_newtable(L);
+ if (isfullcap(cs->cap++))
+ return 1; /* table is empty */
+ while (!isclosecap(cs->cap)) {
+ if (captype(cs->cap) == Cgroup && cs->cap->idx != 0) { /* named group? */
+ pushluaval(cs); /* push group name */
+ pushonenestedvalue(cs);
+ lua_settable(L, -3);
+ }
+ else { /* not a named group */
+ int i;
+ int k = pushcapture(cs);
+ for (i = k; i > 0; i--) /* store all values into table */
+ lua_rawseti(L, -(i + 1), n + i);
+ n += k;
+ }
+ }
+ cs->cap++; /* skip close entry */
+ return 1; /* number of values pushed (only the table) */
+}
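+
+/*
+** Lua-level sketch (assuming the standard lpeg API): lpeg.Ct collects
+** nested captures into a table; named groups become table keys:
+**   local t = lpeg.Ct(lpeg.Cg(lpeg.C(1), "first") * lpeg.C(1)):match("ab")
+**   -- t.first == "a", t[1] == "b"
+*/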
+
+
+/*
+** Table-query capture
+*/
+static int querycap (CapState *cs) {
+ int idx = cs->cap->idx;
+ pushonenestedvalue(cs); /* get nested capture */
+ lua_gettable(cs->L, updatecache(cs, idx)); /* query cap. value at table */
+ if (!lua_isnil(cs->L, -1))
+ return 1;
+ else { /* no value */
+ lua_pop(cs->L, 1); /* remove nil */
+ return 0;
+ }
+}
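+
+/*
+** Lua-level sketch (assuming the standard lpeg API): a table-query
+** capture is written patt / tbl; the nested capture value indexes the
+** table, and a nil result produces no value:
+**   local digit = lpeg.C(lpeg.R("09")) / { ["1"] = "one", ["2"] = "two" }
+**   print(digit:match("2"))   --> two
+*/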
+
+
+/*
+** Fold capture
+*/
+static int foldcap (CapState *cs) {
+ int n;
+ lua_State *L = cs->L;
+ int idx = cs->cap->idx;
+ if (isfullcap(cs->cap++) || /* no nested captures? */
+ isclosecap(cs->cap) || /* no nested captures (large subject)? */
+ (n = pushcapture(cs)) == 0) /* nested captures with no values? */
+ return luaL_error(L, "no initial value for fold capture");
+ if (n > 1)
+ lua_pop(L, n - 1); /* leave only one result for accumulator */
+ while (!isclosecap(cs->cap)) {
+ lua_pushvalue(L, updatecache(cs, idx)); /* get folding function */
+ lua_insert(L, -2); /* put it before accumulator */
+ n = pushcapture(cs); /* get next capture's values */
+ lua_call(L, n + 1, 1); /* call folding function */
+ }
+ cs->cap++; /* skip close entry */
+ return 1; /* only accumulator left on the stack */
+}
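+
+/*
+** Lua-level sketch (assuming the standard lpeg API): a fold capture is
+** lpeg.Cf(patt, f); the first nested value seeds the accumulator and
+** 'f' folds in the remaining ones:
+**   local number = lpeg.R("09") / tonumber
+**   local sum = lpeg.Cf(number * ("," * number)^0,
+**                       function (a, b) return a + b end)
+**   print(sum:match("1,2,3"))   --> 6
+*/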
+
+
+/*
+** Function capture
+*/
+static int functioncap (CapState *cs) {
+ int n;
+ int top = lua_gettop(cs->L);
+ pushluaval(cs); /* push function */
+ n = pushnestedvalues(cs, 0); /* push nested captures */
+ lua_call(cs->L, n, LUA_MULTRET); /* call function */
+ return lua_gettop(cs->L) - top; /* return function's results */
+}
+
+
+/*
+** Select capture
+*/
+static int numcap (CapState *cs) {
+ int idx = cs->cap->idx; /* value to select */
+ if (idx == 0) { /* no values? */
+ nextcap(cs); /* skip entire capture */
+ return 0; /* no value produced */
+ }
+ else {
+ int n = pushnestedvalues(cs, 0);
+ if (n < idx) /* invalid index? */
+ return luaL_error(cs->L, "no capture '%d'", idx);
+ else {
+ lua_pushvalue(cs->L, -(n - idx + 1)); /* get selected capture */
+ lua_replace(cs->L, -(n + 1)); /* put it in place of 1st capture */
+ lua_pop(cs->L, n - 1); /* remove other captures */
+ return 1;
+ }
+ }
+}
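+
+/*
+** Lua-level sketch (assuming the standard lpeg API): patt / n selects
+** the n-th capture value of patt, and patt / 0 discards them all:
+**   print(((lpeg.C(1) * lpeg.C(1)) / 2):match("ab"))   --> b
+*/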
+
+
+/*
+** Return the stack index of the first runtime capture in the given
+** list of captures (or zero if no runtime captures)
+*/
+int finddyncap (Capture *cap, Capture *last) {
+ for (; cap < last; cap++) {
+ if (cap->kind == Cruntime)
+ return cap->idx; /* stack position of first capture */
+ }
+ return 0; /* no dynamic captures in this segment */
+}
+
+
+/*
+** Calls a runtime capture. Returns number of captures removed by
+** the call, including the initial Cgroup. (Captures to be added are
+** on the Lua stack.)
+*/
+int runtimecap (CapState *cs, Capture *close, const char *s, int *rem) {
+ int n, id;
+ lua_State *L = cs->L;
+ int otop = lua_gettop(L);
+ Capture *open = findopen(close);
+ assert(captype(open) == Cgroup);
+ id = finddyncap(open, close); /* get first dynamic capture argument */
+ close->kind = Cclose; /* closes the group */
+ close->s = s;
+ cs->cap = open; cs->valuecached = 0; /* prepare capture state */
+ luaL_checkstack(L, 4, "too many runtime captures");
+ pushluaval(cs); /* push function to be called */
+ lua_pushvalue(L, SUBJIDX); /* push original subject */
+ lua_pushinteger(L, s - cs->s + 1); /* push current position */
+ n = pushnestedvalues(cs, 0); /* push nested captures */
+ lua_call(L, n + 2, LUA_MULTRET); /* call dynamic function */
+ if (id > 0) { /* are there old dynamic captures to be removed? */
+ int i;
+ for (i = id; i <= otop; i++)
+ lua_remove(L, id); /* remove old dynamic captures */
+ *rem = otop - id + 1; /* total number of dynamic captures removed */
+ }
+ else
+ *rem = 0; /* no dynamic captures removed */
+ return close - open; /* number of captures of all kinds removed */
+}
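+
+/*
+** Lua-level sketch (assuming the standard lpeg API): run-time captures
+** are built with lpeg.Cmt(patt, f); 'f' gets the whole subject, the
+** position after patt and patt's values, and returns the new position
+** (or true/false) plus any replacement values:
+**   local even = lpeg.Cmt(lpeg.C(lpeg.R("09")^1), function (s, i, num)
+**     return tonumber(num) % 2 == 0 and i
+**   end)
+**   print(even:match("42"))   --> 3
+*/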
+
+
+/*
+** Auxiliary structure for substitution and string captures: keep
+** information about nested captures for future use, avoiding pushing
+** string results into Lua
+*/
+typedef struct StrAux {
+ int isstring; /* whether capture is a string */
+ union {
+ Capture *cp; /* if not a string, respective capture */
+ struct { /* if it is a string... */
+ const char *s; /* ... starts here */
+ const char *e; /* ... ends here */
+ } s;
+ } u;
+} StrAux;
+
+#define MAXSTRCAPS 10
+
+/*
+** Collect values from current capture into array 'cps'. Current
+** capture must be Cstring (first call) or Csimple (recursive calls).
+** (In first call, fills %0 with whole match for Cstring.)
+** Returns number of elements in the array that were filled.
+*/
+static int getstrcaps (CapState *cs, StrAux *cps, int n) {
+ int k = n++;
+ cps[k].isstring = 1; /* get string value */
+ cps[k].u.s.s = cs->cap->s; /* starts here */
+ if (!isfullcap(cs->cap++)) { /* nested captures? */
+ while (!isclosecap(cs->cap)) { /* traverse them */
+ if (n >= MAXSTRCAPS) /* too many captures? */
+ nextcap(cs); /* skip extra captures (will not need them) */
+ else if (captype(cs->cap) == Csimple) /* string? */
+ n = getstrcaps(cs, cps, n); /* put info. into array */
+ else {
+ cps[n].isstring = 0; /* not a string */
+ cps[n].u.cp = cs->cap; /* keep original capture */
+ nextcap(cs);
+ n++;
+ }
+ }
+ cs->cap++; /* skip close */
+ }
+ cps[k].u.s.e = closeaddr(cs->cap - 1); /* ends here */
+ return n;
+}
+
+
+/*
+** add next capture value (which should be a string) to buffer 'b'
+*/
+static int addonestring (luaL_Buffer *b, CapState *cs, const char *what);
+
+
+/*
+** String capture: add result to buffer 'b' (instead of pushing
+** it into the stack)
+*/
+static void stringcap (luaL_Buffer *b, CapState *cs) {
+ StrAux cps[MAXSTRCAPS];
+ int n;
+ size_t len, i;
+ const char *fmt; /* format string */
+ fmt = lua_tolstring(cs->L, updatecache(cs, cs->cap->idx), &len);
+ n = getstrcaps(cs, cps, 0) - 1; /* collect nested captures */
+ for (i = 0; i < len; i++) { /* traverse them */
+ if (fmt[i] != '%') /* not an escape? */
+ luaL_addchar(b, fmt[i]); /* add it to buffer */
+ else if (fmt[++i] < '0' || fmt[i] > '9') /* not followed by a digit? */
+ luaL_addchar(b, fmt[i]); /* add to buffer */
+ else {
+ int l = fmt[i] - '0'; /* capture index */
+ if (l > n)
+ luaL_error(cs->L, "invalid capture index (%d)", l);
+ else if (cps[l].isstring)
+ luaL_addlstring(b, cps[l].u.s.s, cps[l].u.s.e - cps[l].u.s.s);
+ else {
+ Capture *curr = cs->cap;
+ cs->cap = cps[l].u.cp; /* go back to evaluate that nested capture */
+ if (!addonestring(b, cs, "capture"))
+ luaL_error(cs->L, "no values in capture index %d", l);
+ cs->cap = curr; /* continue from where it stopped */
+ }
+ }
+ }
+}
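+
+/*
+** Lua-level sketch (assuming the standard lpeg API): a string capture
+** is patt / "fmt", where %0 is the whole match and %1..%9 refer to the
+** nested captures collected above:
+**   print(((lpeg.C(1) * lpeg.C(1)) / "%2%1"):match("ab"))   --> ba
+*/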
+
+
+/*
+** Substitution capture: add result to buffer 'b'
+*/
+static void substcap (luaL_Buffer *b, CapState *cs) {
+ const char *curr = cs->cap->s;
+ if (isfullcap(cs->cap)) /* no nested captures? */
+ luaL_addlstring(b, curr, cs->cap->siz - 1); /* keep original text */
+ else {
+ cs->cap++; /* skip open entry */
+ while (!isclosecap(cs->cap)) { /* traverse nested captures */
+ const char *next = cs->cap->s;
+ luaL_addlstring(b, curr, next - curr); /* add text up to capture */
+ if (addonestring(b, cs, "replacement"))
+ curr = closeaddr(cs->cap - 1); /* continue after match */
+ else /* no capture value */
+ curr = next; /* keep original text in final result */
+ }
+ luaL_addlstring(b, curr, cs->cap->s - curr); /* add last piece of text */
+ }
+ cs->cap++; /* go to next capture */
+}
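+
+/*
+** Lua-level sketch (assuming the standard lpeg API): a substitution
+** capture is lpeg.Cs(patt); subject text covered by nested captures is
+** replaced by their (string) values, everything else is kept:
+**   local p = lpeg.Cs(((lpeg.P("a") / "A") + 1)^0)
+**   print(p:match("banana"))   --> bAnAnA
+*/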
+
+
+/*
+** Evaluates a capture and adds its first value to buffer 'b'; returns
+** whether there was a value
+*/
+static int addonestring (luaL_Buffer *b, CapState *cs, const char *what) {
+ switch (captype(cs->cap)) {
+ case Cstring:
+ stringcap(b, cs); /* add capture directly to buffer */
+ return 1;
+ case Csubst:
+ substcap(b, cs); /* add capture directly to buffer */
+ return 1;
+ default: {
+ lua_State *L = cs->L;
+ int n = pushcapture(cs);
+ if (n > 0) {
+ if (n > 1) lua_pop(L, n - 1); /* only one result */
+ if (!lua_isstring(L, -1))
+ luaL_error(L, "invalid %s value (a %s)", what, luaL_typename(L, -1));
+ luaL_addvalue(b);
+ }
+ return n;
+ }
+ }
+}
+
+
+/*
+** Push all values of the current capture into the stack; returns
+** number of values pushed
+*/
+static int pushcapture (CapState *cs) {
+ lua_State *L = cs->L;
+ luaL_checkstack(L, 4, "too many captures");
+ switch (captype(cs->cap)) {
+ case Cposition: {
+ lua_pushinteger(L, cs->cap->s - cs->s + 1);
+ cs->cap++;
+ return 1;
+ }
+ case Cconst: {
+ pushluaval(cs);
+ cs->cap++;
+ return 1;
+ }
+ case Carg: {
+ int arg = (cs->cap++)->idx;
+ if (arg + FIXEDARGS > cs->ptop)
+ return luaL_error(L, "reference to absent extra argument #%d", arg);
+ lua_pushvalue(L, arg + FIXEDARGS);
+ return 1;
+ }
+ case Csimple: {
+ int k = pushnestedvalues(cs, 1);
+ lua_insert(L, -k); /* make whole match be first result */
+ return k;
+ }
+ case Cruntime: {
+ lua_pushvalue(L, (cs->cap++)->idx); /* value is in the stack */
+ return 1;
+ }
+ case Cstring: {
+ luaL_Buffer b;
+ luaL_buffinit(L, &b);
+ stringcap(&b, cs);
+ luaL_pushresult(&b);
+ return 1;
+ }
+ case Csubst: {
+ luaL_Buffer b;
+ luaL_buffinit(L, &b);
+ substcap(&b, cs);
+ luaL_pushresult(&b);
+ return 1;
+ }
+ case Cgroup: {
+ if (cs->cap->idx == 0) /* anonymous group? */
+ return pushnestedvalues(cs, 0); /* add all nested values */
+ else { /* named group: add no values */
+ nextcap(cs); /* skip capture */
+ return 0;
+ }
+ }
+ case Cbackref: return backrefcap(cs);
+ case Ctable: return tablecap(cs);
+ case Cfunction: return functioncap(cs);
+ case Cnum: return numcap(cs);
+ case Cquery: return querycap(cs);
+ case Cfold: return foldcap(cs);
+ default: assert(0); return 0;
+ }
+}
+
+
+/*
+** Prepare a CapState structure and traverse the entire list of
+** captures in the stack pushing its results. 's' is the subject
+** string, 'r' is the final position of the match, and 'ptop'
+** the index in the stack where some useful values were pushed.
+** Returns the number of results pushed. (If the list produces no
+** results, push the final position of the match.)
+*/
+int getcaptures (lua_State *L, const char *s, const char *r, int ptop) {
+ Capture *capture = (Capture *)lua_touserdata(L, caplistidx(ptop));
+ int n = 0;
+ if (!isclosecap(capture)) { /* is there any capture? */
+ CapState cs;
+ cs.ocap = cs.cap = capture; cs.L = L;
+ cs.s = s; cs.valuecached = 0; cs.ptop = ptop;
+ do { /* collect their values */
+ n += pushcapture(&cs);
+ } while (!isclosecap(cs.cap));
+ }
+ if (n == 0) { /* no capture values? */
+ lua_pushinteger(L, r - s + 1); /* return only end position */
+ n = 1;
+ }
+ return n;
+}
+
+
diff --git a/lua/src/lpcap.h b/lua/src/lpcap.h
new file mode 100644
index 000000000..d762fdcfa
--- /dev/null
+++ b/lua/src/lpcap.h
@@ -0,0 +1,43 @@
+/*
+** $Id: lpcap.h,v 1.2 2015/02/27 17:13:17 roberto Exp $
+*/
+
+#if !defined(lpcap_h)
+#define lpcap_h
+
+
+#include "lptypes.h"
+
+
+/* kinds of captures */
+typedef enum CapKind {
+ Cclose, Cposition, Cconst, Cbackref, Carg, Csimple, Ctable, Cfunction,
+ Cquery, Cstring, Cnum, Csubst, Cfold, Cruntime, Cgroup
+} CapKind;
+
+
+typedef struct Capture {
+ const char *s; /* subject position */
+ unsigned short idx; /* extra info (group name, arg index, etc.) */
+ byte kind; /* kind of capture */
+ byte siz; /* size of full capture + 1 (0 = not a full capture) */
+} Capture;
+
+
+typedef struct CapState {
+ Capture *cap; /* current capture */
+ Capture *ocap; /* (original) capture list */
+ lua_State *L;
+ int ptop; /* index of last argument to 'match' */
+ const char *s; /* original string */
+ int valuecached; /* value stored in cache slot */
+} CapState;
+
+
+int runtimecap (CapState *cs, Capture *close, const char *s, int *rem);
+int getcaptures (lua_State *L, const char *s, const char *r, int ptop);
+int finddyncap (Capture *cap, Capture *last);
+
+#endif
+
+
diff --git a/lua/src/lpcode.c b/lua/src/lpcode.c
new file mode 100644
index 000000000..fbf44feb1
--- /dev/null
+++ b/lua/src/lpcode.c
@@ -0,0 +1,986 @@
+/*
+** $Id: lpcode.c,v 1.23 2015/06/12 18:36:47 roberto Exp $
+** Copyright 2007, Lua.org & PUC-Rio (see 'lpeg.html' for license)
+*/
+
+#include <limits.h>
+
+
+#include "lua.h"
+#include "lauxlib.h"
+
+#include "lptypes.h"
+#include "lpcode.h"
+
+
+/* signals a "no-instruction" */
+#define NOINST -1
+
+
+
+static const Charset fullset_ =
+ {{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}};
+
+static const Charset *fullset = &fullset_;
+
+/*
+** {======================================================
+** Analysis and some optimizations
+** =======================================================
+*/
+
+/*
+** Check whether a charset is empty (returns IFail), singleton (IChar),
+** full (IAny), or none of those (ISet). When singleton, '*c' returns
+** which character it is. (When generic set, the set was the input,
+** so there is no need to return it.)
+*/
+static Opcode charsettype (const byte *cs, int *c) {
+ int count = 0; /* number of characters in the set */
+ int i;
+ int candidate = -1; /* candidate position for the singleton char */
+ for (i = 0; i < CHARSETSIZE; i++) { /* for each byte */
+ int b = cs[i];
+ if (b == 0) { /* is byte empty? */
+ if (count > 1) /* was set neither empty nor singleton? */
+ return ISet; /* neither full nor empty nor singleton */
+ /* else set is still empty or singleton */
+ }
+ else if (b == 0xFF) { /* is byte full? */
+ if (count < (i * BITSPERCHAR)) /* was set not full? */
+ return ISet; /* neither full nor empty nor singleton */
+ else count += BITSPERCHAR; /* set is still full */
+ }
+ else if ((b & (b - 1)) == 0) { /* has byte only one bit? */
+ if (count > 0) /* was set not empty? */
+ return ISet; /* neither full nor empty nor singleton */
+ else { /* set has only one char till now; track it */
+ count++;
+ candidate = i;
+ }
+ }
+ else return ISet; /* byte is neither empty, full, nor singleton */
+ }
+ switch (count) {
+ case 0: return IFail; /* empty set */
+ case 1: { /* singleton; find character bit inside byte */
+ int b = cs[candidate];
+ *c = candidate * BITSPERCHAR;
+ if ((b & 0xF0) != 0) { *c += 4; b >>= 4; }
+ if ((b & 0x0C) != 0) { *c += 2; b >>= 2; }
+ if ((b & 0x02) != 0) { *c += 1; }
+ return IChar;
+ }
+ default: {
+ assert(count == CHARSETSIZE * BITSPERCHAR); /* full set */
+ return IAny;
+ }
+ }
+}
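+
+/*
+** Note on the single-bit test above: for a non-zero byte b,
+** (b & (b - 1)) == 0 holds exactly when b is a power of two, i.e.
+** when only one character of that byte's range is present.
+*/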
+
+
+/*
+** A few basic operations on Charsets
+*/
+static void cs_complement (Charset *cs) {
+ loopset(i, cs->cs[i] = ~cs->cs[i]);
+}
+
+static int cs_equal (const byte *cs1, const byte *cs2) {
+ loopset(i, if (cs1[i] != cs2[i]) return 0);
+ return 1;
+}
+
+static int cs_disjoint (const Charset *cs1, const Charset *cs2) {
+ loopset(i, if ((cs1->cs[i] & cs2->cs[i]) != 0) return 0;)
+ return 1;
+}
+
+
+/*
+** If 'tree' is a 'char' pattern (TSet, TChar, TAny), convert it into a
+** charset and return 1; else return 0.
+*/
+int tocharset (TTree *tree, Charset *cs) {
+ switch (tree->tag) {
+ case TSet: { /* copy set */
+ loopset(i, cs->cs[i] = treebuffer(tree)[i]);
+ return 1;
+ }
+ case TChar: { /* only one char */
+ assert(0 <= tree->u.n && tree->u.n <= UCHAR_MAX);
+ loopset(i, cs->cs[i] = 0); /* erase all chars */
+ setchar(cs->cs, tree->u.n); /* add that one */
+ return 1;
+ }
+ case TAny: {
+ loopset(i, cs->cs[i] = 0xFF); /* add all characters to the set */
+ return 1;
+ }
+ default: return 0;
+ }
+}
+
+
+/*
+** Check whether a pattern tree has captures
+*/
+int hascaptures (TTree *tree) {
+ tailcall:
+ switch (tree->tag) {
+ case TCapture: case TRunTime:
+ return 1;
+ case TCall:
+ tree = sib2(tree); goto tailcall; /* return hascaptures(sib2(tree)); */
+ case TOpenCall: assert(0);
+ default: {
+ switch (numsiblings[tree->tag]) {
+ case 1: /* return hascaptures(sib1(tree)); */
+ tree = sib1(tree); goto tailcall;
+ case 2:
+ if (hascaptures(sib1(tree))) return 1;
+ /* else return hascaptures(sib2(tree)); */
+ tree = sib2(tree); goto tailcall;
+ default: assert(numsiblings[tree->tag] == 0); return 0;
+ }
+ }
+ }
+}
+
+
+/*
+** Checks how a pattern behaves regarding the empty string,
+** in one of two different ways:
+** A pattern is *nullable* if it can match without consuming any character;
+** A pattern is *nofail* if it never fails for any string
+** (including the empty string).
+** The difference is only for predicates and run-time captures;
+** for other patterns, the two properties are equivalent.
+** (With predicates, &'a' is nullable but not nofail. Of course,
+** nofail => nullable.)
+** These functions are all conservative in the following way:
+** p is nullable => nullable(p)
+** nofail(p) => p cannot fail
+** The function assumes that TOpenCall is not nullable;
+** this will be checked again when the grammar is fixed.
+** Run-time captures can do whatever they want, so the result
+** is conservative.
+*/
+int checkaux (TTree *tree, int pred) {
+ tailcall:
+ switch (tree->tag) {
+ case TChar: case TSet: case TAny:
+ case TFalse: case TOpenCall:
+ return 0; /* not nullable */
+ case TRep: case TTrue:
+ return 1; /* no fail */
+ case TNot: case TBehind: /* can match empty, but can fail */
+ if (pred == PEnofail) return 0;
+ else return 1; /* PEnullable */
+ case TAnd: /* can match empty; fail iff body does */
+ if (pred == PEnullable) return 1;
+ /* else return checkaux(sib1(tree), pred); */
+ tree = sib1(tree); goto tailcall;
+ case TRunTime: /* can fail; match empty iff body does */
+ if (pred == PEnofail) return 0;
+ /* else return checkaux(sib1(tree), pred); */
+ tree = sib1(tree); goto tailcall;
+ case TSeq:
+ if (!checkaux(sib1(tree), pred)) return 0;
+ /* else return checkaux(sib2(tree), pred); */
+ tree = sib2(tree); goto tailcall;
+ case TChoice:
+ if (checkaux(sib2(tree), pred)) return 1;
+ /* else return checkaux(sib1(tree), pred); */
+ tree = sib1(tree); goto tailcall;
+ case TCapture: case TGrammar: case TRule:
+ /* return checkaux(sib1(tree), pred); */
+ tree = sib1(tree); goto tailcall;
+ case TCall: /* return checkaux(sib2(tree), pred); */
+ tree = sib2(tree); goto tailcall;
+ default: assert(0); return 0;
+ }
+}
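+
+/*
+** Pattern-level illustration (assuming the standard lpeg API):
+** #lpeg.P("a") (an and-predicate) is nullable, since it consumes
+** nothing on success, but not nofail, since it fails whenever the
+** subject does not start with "a"; lpeg.P("a")^0 is both nullable
+** and nofail.
+*/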
+
+
+/*
+** number of characters to match a pattern (or -1 if variable)
+** ('count' avoids infinite loops for grammars)
+*/
+int fixedlenx (TTree *tree, int count, int len) {
+ tailcall:
+ switch (tree->tag) {
+ case TChar: case TSet: case TAny:
+ return len + 1;
+ case TFalse: case TTrue: case TNot: case TAnd: case TBehind:
+ return len;
+ case TRep: case TRunTime: case TOpenCall:
+ return -1;
+ case TCapture: case TRule: case TGrammar:
+ /* return fixedlenx(sib1(tree), count); */
+ tree = sib1(tree); goto tailcall;
+ case TCall:
+ if (count++ >= MAXRULES)
+ return -1; /* may be a loop */
+ /* else return fixedlenx(sib2(tree), count); */
+ tree = sib2(tree); goto tailcall;
+ case TSeq: {
+ len = fixedlenx(sib1(tree), count, len);
+ if (len < 0) return -1;
+ /* else return fixedlenx(sib2(tree), count, len); */
+ tree = sib2(tree); goto tailcall;
+ }
+ case TChoice: {
+ int n1, n2;
+ n1 = fixedlenx(sib1(tree), count, len);
+ if (n1 < 0) return -1;
+ n2 = fixedlenx(sib2(tree), count, len);
+ if (n1 == n2) return n1;
+ else return -1;
+ }
+ default: assert(0); return 0;
+ };
+}
+
+
+/*
+** Computes the 'first set' of a pattern.
+** The result is a conservative approximation:
+** match p ax -> x (for some x) ==> a belongs to first(p)
+** or
+** a not in first(p) ==> match p ax -> fail (for all x)
+**
+** The set 'follow' is the first set of what follows the
+** pattern (full set if nothing follows it).
+**
+** The function returns 0 when this resulting set can be used for
+** test instructions that avoid the pattern altogether.
+** A non-zero return can happen for two reasons:
+** 1) match p '' -> '' ==> return has bit 1 set
+** (tests cannot be used because they would always fail for an empty input);
+** 2) there is a match-time capture ==> return has bit 2 set
+** (optimizations should not bypass match-time captures).
+*/
+static int getfirst (TTree *tree, const Charset *follow, Charset *firstset) {
+ tailcall:
+ switch (tree->tag) {
+ case TChar: case TSet: case TAny: {
+ tocharset(tree, firstset);
+ return 0;
+ }
+ case TTrue: {
+ loopset(i, firstset->cs[i] = follow->cs[i]);
+ return 1; /* accepts the empty string */
+ }
+ case TFalse: {
+ loopset(i, firstset->cs[i] = 0);
+ return 0;
+ }
+ case TChoice: {
+ Charset csaux;
+ int e1 = getfirst(sib1(tree), follow, firstset);
+ int e2 = getfirst(sib2(tree), follow, &csaux);
+ loopset(i, firstset->cs[i] |= csaux.cs[i]);
+ return e1 | e2;
+ }
+ case TSeq: {
+ if (!nullable(sib1(tree))) {
+ /* when p1 is not nullable, p2 has nothing to contribute;
+ return getfirst(sib1(tree), fullset, firstset); */
+ tree = sib1(tree); follow = fullset; goto tailcall;
+ }
+ else { /* FIRST(p1 p2, fl) = FIRST(p1, FIRST(p2, fl)) */
+ Charset csaux;
+ int e2 = getfirst(sib2(tree), follow, &csaux);
+ int e1 = getfirst(sib1(tree), &csaux, firstset);
+ if (e1 == 0) return 0; /* 'e1' ensures that first can be used */
+ else if ((e1 | e2) & 2) /* one of the children has a matchtime? */
+ return 2; /* pattern has a matchtime capture */
+ else return e2; /* else depends on 'e2' */
+ }
+ }
+ case TRep: {
+ getfirst(sib1(tree), follow, firstset);
+ loopset(i, firstset->cs[i] |= follow->cs[i]);
+ return 1; /* accept the empty string */
+ }
+ case TCapture: case TGrammar: case TRule: {
+ /* return getfirst(sib1(tree), follow, firstset); */
+ tree = sib1(tree); goto tailcall;
+ }
+ case TRunTime: { /* function invalidates any follow info. */
+ int e = getfirst(sib1(tree), fullset, firstset);
+ if (e) return 2; /* function is not "protected"? */
+ else return 0; /* pattern inside capture ensures first can be used */
+ }
+ case TCall: {
+ /* return getfirst(sib2(tree), follow, firstset); */
+ tree = sib2(tree); goto tailcall;
+ }
+ case TAnd: {
+ int e = getfirst(sib1(tree), follow, firstset);
+ loopset(i, firstset->cs[i] &= follow->cs[i]);
+ return e;
+ }
+ case TNot: {
+ if (tocharset(sib1(tree), firstset)) {
+ cs_complement(firstset);
+ return 1;
+ }
+ /* else go through */
+ }
+ case TBehind: { /* instruction gives no new information */
+    /* call 'getfirst' only to check for match-time captures */
+ int e = getfirst(sib1(tree), follow, firstset);
+ loopset(i, firstset->cs[i] = follow->cs[i]); /* uses follow */
+ return e | 1; /* always can accept the empty string */
+ }
+ default: assert(0); return 0;
+ }
+}
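+
+/*
+** Pattern-level illustration (assuming the standard lpeg API): for
+** lpeg.P("a") + lpeg.P("b") the first set is {a, b}, so the generated
+** code can start with a test against that set and fail immediately on
+** any other input character without entering either alternative.
+*/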
+
+
+/*
+** If 'headfail(tree)' is true, then 'tree' can fail only depending on the
+** next character of the subject.
+*/
+static int headfail (TTree *tree) {
+ tailcall:
+ switch (tree->tag) {
+ case TChar: case TSet: case TAny: case TFalse:
+ return 1;
+ case TTrue: case TRep: case TRunTime: case TNot:
+ case TBehind:
+ return 0;
+ case TCapture: case TGrammar: case TRule: case TAnd:
+ tree = sib1(tree); goto tailcall; /* return headfail(sib1(tree)); */
+ case TCall:
+ tree = sib2(tree); goto tailcall; /* return headfail(sib2(tree)); */
+ case TSeq:
+ if (!nofail(sib2(tree))) return 0;
+ /* else return headfail(sib1(tree)); */
+ tree = sib1(tree); goto tailcall;
+ case TChoice:
+ if (!headfail(sib1(tree))) return 0;
+ /* else return headfail(sib2(tree)); */
+ tree = sib2(tree); goto tailcall;
+ default: assert(0); return 0;
+ }
+}
+
+
+/*
+** Check whether the code generation for the given tree can benefit
+** from a follow set (to avoid computing the follow set when it is
+** not needed)
+*/
+static int needfollow (TTree *tree) {
+ tailcall:
+ switch (tree->tag) {
+ case TChar: case TSet: case TAny:
+ case TFalse: case TTrue: case TAnd: case TNot:
+ case TRunTime: case TGrammar: case TCall: case TBehind:
+ return 0;
+ case TChoice: case TRep:
+ return 1;
+ case TCapture:
+ tree = sib1(tree); goto tailcall;
+ case TSeq:
+ tree = sib2(tree); goto tailcall;
+ default: assert(0); return 0;
+ }
+}
+
+/* }====================================================== */
+
+
+
+/*
+** {======================================================
+** Code generation
+** =======================================================
+*/
+
+
+/*
+** size of an instruction
+*/
+int sizei (const Instruction *i) {
+ switch((Opcode)i->i.code) {
+ case ISet: case ISpan: return CHARSETINSTSIZE;
+ case ITestSet: return CHARSETINSTSIZE + 1;
+ case ITestChar: case ITestAny: case IChoice: case IJmp: case ICall:
+ case IOpenCall: case ICommit: case IPartialCommit: case IBackCommit:
+ return 2;
+ default: return 1;
+ }
+}
+
+
+/*
+** state for the compiler
+*/
+typedef struct CompileState {
+ Pattern *p; /* pattern being compiled */
+ int ncode; /* next position in p->code to be filled */
+ lua_State *L;
+} CompileState;
+
+
+/*
+** code generation is recursive; 'opt' indicates that the code is being
+** generated as the last thing inside an optional pattern (so, if that
+** code is optional too, it can reuse the 'IChoice' already in place for
+** the outer pattern). 'tt' points to a previous test protecting this
+** code (or NOINST). 'fl' is the follow set of the pattern.
+*/
+static void codegen (CompileState *compst, TTree *tree, int opt, int tt,
+ const Charset *fl);
+
+
+void realloccode (lua_State *L, Pattern *p, int nsize) {
+ void *ud;
+ lua_Alloc f = lua_getallocf(L, &ud);
+ void *newblock = f(ud, p->code, p->codesize * sizeof(Instruction),
+ nsize * sizeof(Instruction));
+ if (newblock == NULL && nsize > 0)
+ luaL_error(L, "not enough memory");
+ p->code = (Instruction *)newblock;
+ p->codesize = nsize;
+}
+
+
+static int nextinstruction (CompileState *compst) {
+ int size = compst->p->codesize;
+ if (compst->ncode >= size)
+ realloccode(compst->L, compst->p, size * 2);
+ return compst->ncode++;
+}
+
+
+#define getinstr(cs,i) ((cs)->p->code[i])
+
+
+static int addinstruction (CompileState *compst, Opcode op, int aux) {
+ int i = nextinstruction(compst);
+ getinstr(compst, i).i.code = op;
+ getinstr(compst, i).i.aux = aux;
+ return i;
+}
+
+
+/*
+** Add an instruction followed by space for an offset (to be set later)
+*/
+static int addoffsetinst (CompileState *compst, Opcode op) {
+ int i = addinstruction(compst, op, 0); /* instruction */
+ addinstruction(compst, (Opcode)0, 0); /* open space for offset */
+ assert(op == ITestSet || sizei(&getinstr(compst, i)) == 2);
+ return i;
+}
+
+
+/*
+** Set the offset of an instruction
+*/
+static void setoffset (CompileState *compst, int instruction, int offset) {
+ getinstr(compst, instruction + 1).offset = offset;
+}
+
+
+/*
+** Add a capture instruction:
+** 'op' is the capture instruction; 'cap' the capture kind;
+** 'key' the key into ktable; 'aux' is the optional capture offset
+**
+*/
+static int addinstcap (CompileState *compst, Opcode op, int cap, int key,
+ int aux) {
+ int i = addinstruction(compst, op, joinkindoff(cap, aux));
+ getinstr(compst, i).i.key = key;
+ return i;
+}
+
+
+#define gethere(compst) ((compst)->ncode)
+
+#define target(code,i) ((i) + code[i + 1].offset)
+
+
+/*
+** Patch 'instruction' to jump to 'target'
+*/
+static void jumptothere (CompileState *compst, int instruction, int target) {
+ if (instruction >= 0)
+ setoffset(compst, instruction, target - instruction);
+}
+
+
+/*
+** Patch 'instruction' to jump to current position
+*/
+static void jumptohere (CompileState *compst, int instruction) {
+ jumptothere(compst, instruction, gethere(compst));
+}
+
+
+/*
+** Code an IChar instruction, or IAny if there is an equivalent
+** test dominating it
+*/
+static void codechar (CompileState *compst, int c, int tt) {
+ if (tt >= 0 && getinstr(compst, tt).i.code == ITestChar &&
+ getinstr(compst, tt).i.aux == c)
+ addinstruction(compst, IAny, 0);
+ else
+ addinstruction(compst, IChar, c);
+}
+
+
+/*
+** Add a charset postfix to an instruction
+*/
+static void addcharset (CompileState *compst, const byte *cs) {
+ int p = gethere(compst);
+ int i;
+ for (i = 0; i < (int)CHARSETINSTSIZE - 1; i++)
+ nextinstruction(compst); /* space for buffer */
+ /* fill buffer with charset */
+ loopset(j, getinstr(compst, p).buff[j] = cs[j]);
+}
+
+
+/*
+** code a char set, optimizing unit sets for IChar, "complete"
+** sets for IAny, and empty sets for IFail; also use an IAny
+** when instruction is dominated by an equivalent test.
+*/
+static void codecharset (CompileState *compst, const byte *cs, int tt) {
+ int c = 0; /* (=) to avoid warnings */
+ Opcode op = charsettype(cs, &c);
+ switch (op) {
+ case IChar: codechar(compst, c, tt); break;
+ case ISet: { /* non-trivial set? */
+ if (tt >= 0 && getinstr(compst, tt).i.code == ITestSet &&
+ cs_equal(cs, getinstr(compst, tt + 2).buff))
+ addinstruction(compst, IAny, 0);
+ else {
+ addinstruction(compst, ISet, 0);
+ addcharset(compst, cs);
+ }
+ break;
+ }
+ default: addinstruction(compst, op, c); break;
+ }
+}
+
+
+/*
+** code a test set, optimizing unit sets for ITestChar, "complete"
+** sets for ITestAny, and empty sets for IJmp (always fails).
+** 'e' is true iff test should accept the empty string. (Test
+** instructions in the current VM never accept the empty string.)
+*/
+static int codetestset (CompileState *compst, Charset *cs, int e) {
+ if (e) return NOINST; /* no test */
+ else {
+ int c = 0;
+ Opcode op = charsettype(cs->cs, &c);
+ switch (op) {
+ case IFail: return addoffsetinst(compst, IJmp); /* always jump */
+ case IAny: return addoffsetinst(compst, ITestAny);
+ case IChar: {
+ int i = addoffsetinst(compst, ITestChar);
+ getinstr(compst, i).i.aux = c;
+ return i;
+ }
+ case ISet: {
+ int i = addoffsetinst(compst, ITestSet);
+ addcharset(compst, cs->cs);
+ return i;
+ }
+ default: assert(0); return 0;
+ }
+ }
+}
+
+
+/*
+** Find the final destination of a sequence of jumps
+*/
+static int finaltarget (Instruction *code, int i) {
+ while (code[i].i.code == IJmp)
+ i = target(code, i);
+ return i;
+}
+
+
+/*
+** final label (after traversing any jumps)
+*/
+static int finallabel (Instruction *code, int i) {
+ return finaltarget(code, target(code, i));
+}
+
+
+/*
+** <behind(p)> == behind n; <p> (where n = fixedlen(p))
+*/
+static void codebehind (CompileState *compst, TTree *tree) {
+ if (tree->u.n > 0)
+ addinstruction(compst, IBehind, tree->u.n);
+ codegen(compst, sib1(tree), 0, NOINST, fullset);
+}
+
+
+/*
+** Choice; optimizations:
+** - when p1 is headfail or
+** when first(p1) and first(p2) are disjoint, then
+** a character not in first(p1) cannot go to p1, and a character
+** in first(p1) cannot go to p2 (as it is not in first(p2)).
+** (The optimization is not valid if p1 accepts the empty string,
+** as then there is no character at all...)
+** - when p2 is empty and opt is true; an IPartialCommit can reuse
+** the Choice already active in the stack.
+*/
+static void codechoice (CompileState *compst, TTree *p1, TTree *p2, int opt,
+ const Charset *fl) {
+ int emptyp2 = (p2->tag == TTrue);
+ Charset cs1, cs2;
+ int e1 = getfirst(p1, fullset, &cs1);
+ if (headfail(p1) ||
+ (!e1 && (getfirst(p2, fl, &cs2), cs_disjoint(&cs1, &cs2)))) {
+ /* <p1 / p2> == test (fail(p1)) -> L1 ; p1 ; jmp L2; L1: p2; L2: */
+ int test = codetestset(compst, &cs1, 0);
+ int jmp = NOINST;
+ codegen(compst, p1, 0, test, fl);
+ if (!emptyp2)
+ jmp = addoffsetinst(compst, IJmp);
+ jumptohere(compst, test);
+ codegen(compst, p2, opt, NOINST, fl);
+ jumptohere(compst, jmp);
+ }
+ else if (opt && emptyp2) {
+ /* p1? == IPartialCommit; p1 */
+ jumptohere(compst, addoffsetinst(compst, IPartialCommit));
+ codegen(compst, p1, 1, NOINST, fullset);
+ }
+ else {
+ /* <p1 / p2> ==
+ test(first(p1)) -> L1; choice L1; <p1>; commit L2; L1: <p2>; L2: */
+ int pcommit;
+ int test = codetestset(compst, &cs1, e1);
+ int pchoice = addoffsetinst(compst, IChoice);
+ codegen(compst, p1, emptyp2, test, fullset);
+ pcommit = addoffsetinst(compst, ICommit);
+ jumptohere(compst, pchoice);
+ jumptohere(compst, test);
+ codegen(compst, p2, opt, NOINST, fl);
+ jumptohere(compst, pcommit);
+ }
+}
+
+
+/*
+** And predicate
+** optimization: fixedlen(p) = n ==> <&p> == <p>; behind n
+** (valid only when 'p' has no captures)
+*/
+static void codeand (CompileState *compst, TTree *tree, int tt) {
+ int n = fixedlen(tree);
+ if (n >= 0 && n <= MAXBEHIND && !hascaptures(tree)) {
+ codegen(compst, tree, 0, tt, fullset);
+ if (n > 0)
+ addinstruction(compst, IBehind, n);
+ }
+ else { /* default: Choice L1; p1; BackCommit L2; L1: Fail; L2: */
+ int pcommit;
+ int pchoice = addoffsetinst(compst, IChoice);
+ codegen(compst, tree, 0, tt, fullset);
+ pcommit = addoffsetinst(compst, IBackCommit);
+ jumptohere(compst, pchoice);
+ addinstruction(compst, IFail, 0);
+ jumptohere(compst, pcommit);
+ }
+}
+
+
+/*
+** Captures: if pattern has fixed (and not too big) length, use
+** a single IFullCapture instruction after the match; otherwise,
+** enclose the pattern with OpenCapture - CloseCapture.
+*/
+static void codecapture (CompileState *compst, TTree *tree, int tt,
+ const Charset *fl) {
+ int len = fixedlen(sib1(tree));
+ if (len >= 0 && len <= MAXOFF && !hascaptures(sib1(tree))) {
+ codegen(compst, sib1(tree), 0, tt, fl);
+ addinstcap(compst, IFullCapture, tree->cap, tree->key, len);
+ }
+ else {
+ addinstcap(compst, IOpenCapture, tree->cap, tree->key, 0);
+ codegen(compst, sib1(tree), 0, tt, fl);
+ addinstcap(compst, ICloseCapture, Cclose, 0, 0);
+ }
+}
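+
+/*
+** Example: lpeg.C(lpeg.P("abc")) has fixed length 3 and no nested
+** captures, so it compiles to the pattern followed by a single
+** IFullCapture (offset 3) instead of an OpenCapture/CloseCapture pair.
+*/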
+
+
+static void coderuntime (CompileState *compst, TTree *tree, int tt) {
+ addinstcap(compst, IOpenCapture, Cgroup, tree->key, 0);
+ codegen(compst, sib1(tree), 0, tt, fullset);
+ addinstcap(compst, ICloseRunTime, Cclose, 0, 0);
+}
+
+
+/*
+** Repetition; optimizations:
+** When pattern is a charset, can use special instruction ISpan.
+** When pattern is head fail, or if it starts with characters that
+** are disjoint from what follows the repetition, a simple test
+** is enough (a fail inside the repetition would backtrack to fail
+** again in the following pattern, so there is no need for a choice).
+** When 'opt' is true, the repetition can reuse the Choice already
+** active in the stack.
+*/
+static void coderep (CompileState *compst, TTree *tree, int opt,
+ const Charset *fl) {
+ Charset st;
+ if (tocharset(tree, &st)) {
+ addinstruction(compst, ISpan, 0);
+ addcharset(compst, st.cs);
+ }
+ else {
+ int e1 = getfirst(tree, fullset, &st);
+ if (headfail(tree) || (!e1 && cs_disjoint(&st, fl))) {
+ /* L1: test (fail(p1)) -> L2; <p>; jmp L1; L2: */
+ int jmp;
+ int test = codetestset(compst, &st, 0);
+ codegen(compst, tree, 0, test, fullset);
+ jmp = addoffsetinst(compst, IJmp);
+ jumptohere(compst, test);
+ jumptothere(compst, jmp, test);
+ }
+ else {
+ /* test(fail(p1)) -> L2; choice L2; L1: <p>; partialcommit L1; L2: */
+ /* or (if 'opt'): partialcommit L1; L1: <p>; partialcommit L1; */
+ int commit, l2;
+ int test = codetestset(compst, &st, e1);
+ int pchoice = NOINST;
+ if (opt)
+ jumptohere(compst, addoffsetinst(compst, IPartialCommit));
+ else
+ pchoice = addoffsetinst(compst, IChoice);
+ l2 = gethere(compst);
+ codegen(compst, tree, 0, NOINST, fullset);
+ commit = addoffsetinst(compst, IPartialCommit);
+ jumptothere(compst, commit, l2);
+ jumptohere(compst, pchoice);
+ jumptohere(compst, test);
+ }
+ }
+}
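+
+/*
+** Example: lpeg.R("09")^0 satisfies tocharset(), so the repetition is
+** compiled to a single ISpan instruction over the digit set instead of
+** a choice/partial-commit loop.
+*/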
+
+
+/*
+** Not predicate; optimizations:
+** In any case, if first test fails, 'not' succeeds, so it can jump to
+** the end. If pattern is headfail, that is all (it cannot fail
+** in other parts); this case includes 'not' of simple sets. Otherwise,
+** use the default code (a choice plus a failtwice).
+*/
+static void codenot (CompileState *compst, TTree *tree) {
+ Charset st;
+ int e = getfirst(tree, fullset, &st);
+ int test = codetestset(compst, &st, e);
+ if (headfail(tree)) /* test (fail(p1)) -> L1; fail; L1: */
+ addinstruction(compst, IFail, 0);
+ else {
+ /* test(fail(p))-> L1; choice L1; <p>; failtwice; L1: */
+ int pchoice = addoffsetinst(compst, IChoice);
+ codegen(compst, tree, 0, NOINST, fullset);
+ addinstruction(compst, IFailTwice, 0);
+ jumptohere(compst, pchoice);
+ }
+ jumptohere(compst, test);
+}
+
+
+/*
+** change open calls to calls, using list 'positions' to find
+** correct offsets; also optimize tail calls
+*/
+static void correctcalls (CompileState *compst, int *positions,
+ int from, int to) {
+ int i;
+ Instruction *code = compst->p->code;
+ for (i = from; i < to; i += sizei(&code[i])) {
+ if (code[i].i.code == IOpenCall) {
+ int n = code[i].i.key; /* rule number */
+ int rule = positions[n]; /* rule position */
+ assert(rule == from || code[rule - 1].i.code == IRet);
+ if (code[finaltarget(code, i + 2)].i.code == IRet) /* call; ret ? */
+ code[i].i.code = IJmp; /* tail call */
+ else
+ code[i].i.code = ICall;
+ jumptothere(compst, i, rule); /* call jumps to respective rule */
+ }
+ }
+ assert(i == to);
+}
+
+
+/*
+** Code for a grammar:
+** call L1; jmp L2; L1: rule 1; ret; rule 2; ret; ...; L2:
+*/
+static void codegrammar (CompileState *compst, TTree *grammar) {
+ int positions[MAXRULES];
+ int rulenumber = 0;
+ TTree *rule;
+ int firstcall = addoffsetinst(compst, ICall); /* call initial rule */
+ int jumptoend = addoffsetinst(compst, IJmp); /* jump to the end */
+ int start = gethere(compst); /* here starts the initial rule */
+ jumptohere(compst, firstcall);
+ for (rule = sib1(grammar); rule->tag == TRule; rule = sib2(rule)) {
+ positions[rulenumber++] = gethere(compst); /* save rule position */
+ codegen(compst, sib1(rule), 0, NOINST, fullset); /* code rule */
+ addinstruction(compst, IRet, 0);
+ }
+ assert(rule->tag == TTrue);
+ jumptohere(compst, jumptoend);
+ correctcalls(compst, positions, start, gethere(compst));
+}
+
+
+static void codecall (CompileState *compst, TTree *call) {
+ int c = addoffsetinst(compst, IOpenCall); /* to be corrected later */
+ getinstr(compst, c).i.key = sib2(call)->cap; /* rule number */
+ assert(sib2(call)->tag == TRule);
+}
+
+
+/*
+** Code first child of a sequence
+** (second child is called in-place to allow tail call)
+** Return 'tt' for second child
+*/
+static int codeseq1 (CompileState *compst, TTree *p1, TTree *p2,
+ int tt, const Charset *fl) {
+ if (needfollow(p1)) {
+ Charset fl1;
+ getfirst(p2, fl, &fl1); /* p1 follow is p2 first */
+ codegen(compst, p1, 0, tt, &fl1);
+ }
+ else /* use 'fullset' as follow */
+ codegen(compst, p1, 0, tt, fullset);
+ if (fixedlen(p1) != 0) /* can 'p1' consume anything? */
+ return NOINST; /* invalidate test */
+ else return tt; /* else 'tt' still protects sib2 */
+}
+
+
+/*
+** Main code-generation function: dispatch to auxiliary functions
+** according to kind of tree. ('needfollow' should return true
+** only for constructions that use 'fl'.)
+*/
+static void codegen (CompileState *compst, TTree *tree, int opt, int tt,
+ const Charset *fl) {
+ tailcall:
+ switch (tree->tag) {
+ case TChar: codechar(compst, tree->u.n, tt); break;
+ case TAny: addinstruction(compst, IAny, 0); break;
+ case TSet: codecharset(compst, treebuffer(tree), tt); break;
+ case TTrue: break;
+ case TFalse: addinstruction(compst, IFail, 0); break;
+ case TChoice: codechoice(compst, sib1(tree), sib2(tree), opt, fl); break;
+ case TRep: coderep(compst, sib1(tree), opt, fl); break;
+ case TBehind: codebehind(compst, tree); break;
+ case TNot: codenot(compst, sib1(tree)); break;
+ case TAnd: codeand(compst, sib1(tree), tt); break;
+ case TCapture: codecapture(compst, tree, tt, fl); break;
+ case TRunTime: coderuntime(compst, tree, tt); break;
+ case TGrammar: codegrammar(compst, tree); break;
+ case TCall: codecall(compst, tree); break;
+ case TSeq: {
+ tt = codeseq1(compst, sib1(tree), sib2(tree), tt, fl); /* code 'p1' */
+ /* codegen(compst, p2, opt, tt, fl); */
+ tree = sib2(tree); goto tailcall;
+ }
+ default: assert(0);
+ }
+}
+
+
+/*
+** Optimize jumps and other jump-like instructions.
+** * Update labels of instructions with labels to their final
+** destinations (e.g., choice L1; ... L1: jmp L2: becomes
+** choice L2)
+** * Jumps to other instructions that do jumps become those
+** instructions (e.g., jump to return becomes a return; jump
+** to commit becomes a commit)
+*/
+static void peephole (CompileState *compst) {
+ Instruction *code = compst->p->code;
+ int i;
+ for (i = 0; i < compst->ncode; i += sizei(&code[i])) {
+ redo:
+ switch (code[i].i.code) {
+ case IChoice: case ICall: case ICommit: case IPartialCommit:
+ case IBackCommit: case ITestChar: case ITestSet:
+ case ITestAny: { /* instructions with labels */
+ jumptothere(compst, i, finallabel(code, i)); /* optimize label */
+ break;
+ }
+ case IJmp: {
+ int ft = finaltarget(code, i);
+ switch (code[ft].i.code) { /* jumping to what? */
+ case IRet: case IFail: case IFailTwice:
+ case IEnd: { /* instructions with unconditional implicit jumps */
+ code[i] = code[ft]; /* jump becomes that instruction */
+ code[i + 1].i.code = IAny; /* 'no-op' for target position */
+ break;
+ }
+ case ICommit: case IPartialCommit:
+ case IBackCommit: { /* inst. with unconditional explicit jumps */
+ int fft = finallabel(code, ft);
+ code[i] = code[ft]; /* jump becomes that instruction... */
+ jumptothere(compst, i, fft); /* but must correct its offset */
+ goto redo; /* reoptimize its label */
+ }
+ default: {
+ jumptothere(compst, i, ft); /* optimize label */
+ break;
+ }
+ }
+ break;
+ }
+ default: break;
+ }
+ }
+ assert(code[i - 1].i.code == IEnd);
+}
+
+
+/*
+** Compile a pattern
+*/
+Instruction *compile (lua_State *L, Pattern *p) {
+ CompileState compst;
+ compst.p = p; compst.ncode = 0; compst.L = L;
+ realloccode(L, p, 2); /* minimum initial size */
+ codegen(&compst, p->tree, 0, NOINST, fullset);
+ addinstruction(&compst, IEnd, 0);
+ realloccode(L, p, compst.ncode); /* set final size */
+ peephole(&compst);
+ return p->code;
+}
+
+
+/* }====================================================== */
+
diff --git a/lua/src/lpcode.h b/lua/src/lpcode.h
new file mode 100644
index 000000000..896d3c79a
--- /dev/null
+++ b/lua/src/lpcode.h
@@ -0,0 +1,42 @@
+/*
+** $Id: lpcode.h,v 1.7 2015/06/12 18:24:45 roberto Exp $
+*/
+
+#if !defined(lpcode_h)
+#define lpcode_h
+
+#include "lua.h"
+
+#include "lptypes.h"
+#include "lptree.h"
+#include "lpvm.h"
+
+int tocharset (TTree *tree, Charset *cs);
+int checkaux (TTree *tree, int pred);
+int fixedlenx (TTree *tree, int count, int len);
+int hascaptures (TTree *tree);
+int lp_gc (lua_State *L);
+Instruction *compile (lua_State *L, Pattern *p);
+void realloccode (lua_State *L, Pattern *p, int nsize);
+int sizei (const Instruction *i);
+
+
+#define PEnullable 0
+#define PEnofail 1
+
+/*
+** nofail(t) implies that 't' cannot fail with any input
+*/
+#define nofail(t) checkaux(t, PEnofail)
+
+/*
+** (not nullable(t)) implies 't' cannot match without consuming
+** something
+*/
+#define nullable(t) checkaux(t, PEnullable)
+
+#define fixedlen(t) fixedlenx(t, 0, 0)
+
+
+
+#endif
diff --git a/lua/src/lpprint.c b/lua/src/lpprint.c
new file mode 100644
index 000000000..174d1687b
--- /dev/null
+++ b/lua/src/lpprint.c
@@ -0,0 +1,244 @@
+/*
+** $Id: lpprint.c,v 1.9 2015/06/15 16:09:57 roberto Exp $
+** Copyright 2007, Lua.org & PUC-Rio (see 'lpeg.html' for license)
+*/
+
+#include <ctype.h>
+#include <limits.h>
+#include <stdio.h>
+
+
+#include "lptypes.h"
+#include "lpprint.h"
+#include "lpcode.h"
+
+
+#if defined(LPEG_DEBUG)
+
+/*
+** {======================================================
+** Printing patterns (for debugging)
+** =======================================================
+*/
+
+
+void printcharset (const byte *st) {
+ int i;
+ printf("[");
+ for (i = 0; i <= UCHAR_MAX; i++) {
+ int first = i;
+    while (i <= UCHAR_MAX && testchar(st, i)) i++;  /* bound check first */
+ if (i - 1 == first) /* unary range? */
+ printf("(%02x)", first);
+ else if (i - 1 > first) /* non-empty range? */
+ printf("(%02x-%02x)", first, i - 1);
+ }
+ printf("]");
+}
+
+
+static void printcapkind (int kind) {
+ const char *const modes[] = {
+ "close", "position", "constant", "backref",
+ "argument", "simple", "table", "function",
+ "query", "string", "num", "substitution", "fold",
+ "runtime", "group"};
+ printf("%s", modes[kind]);
+}
+
+
+static void printjmp (const Instruction *op, const Instruction *p) {
+ printf("-> %d", (int)(p + (p + 1)->offset - op));
+}
+
+
+void printinst (const Instruction *op, const Instruction *p) {
+ const char *const names[] = {
+ "any", "char", "set",
+ "testany", "testchar", "testset",
+ "span", "behind",
+ "ret", "end",
+ "choice", "jmp", "call", "open_call",
+ "commit", "partial_commit", "back_commit", "failtwice", "fail", "giveup",
+ "fullcapture", "opencapture", "closecapture", "closeruntime"
+ };
+ printf("%02ld: %s ", (long)(p - op), names[p->i.code]);
+ switch ((Opcode)p->i.code) {
+ case IChar: {
+ printf("'%c'", p->i.aux);
+ break;
+ }
+ case ITestChar: {
+ printf("'%c'", p->i.aux); printjmp(op, p);
+ break;
+ }
+ case IFullCapture: {
+ printcapkind(getkind(p));
+ printf(" (size = %d) (idx = %d)", getoff(p), p->i.key);
+ break;
+ }
+ case IOpenCapture: {
+ printcapkind(getkind(p));
+ printf(" (idx = %d)", p->i.key);
+ break;
+ }
+ case ISet: {
+ printcharset((p+1)->buff);
+ break;
+ }
+ case ITestSet: {
+ printcharset((p+2)->buff); printjmp(op, p);
+ break;
+ }
+ case ISpan: {
+ printcharset((p+1)->buff);
+ break;
+ }
+ case IOpenCall: {
+ printf("-> %d", (p + 1)->offset);
+ break;
+ }
+ case IBehind: {
+ printf("%d", p->i.aux);
+ break;
+ }
+ case IJmp: case ICall: case ICommit: case IChoice:
+ case IPartialCommit: case IBackCommit: case ITestAny: {
+ printjmp(op, p);
+ break;
+ }
+ default: break;
+ }
+ printf("\n");
+}
+
+
+void printpatt (Instruction *p, int n) {
+ Instruction *op = p;
+ while (p < op + n) {
+ printinst(op, p);
+ p += sizei(p);
+ }
+}
+
+
+#if defined(LPEG_DEBUG)
+static void printcap (Capture *cap) {
+ printcapkind(cap->kind);
+ printf(" (idx: %d - size: %d) -> %p\n", cap->idx, cap->siz, cap->s);
+}
+
+
+void printcaplist (Capture *cap, Capture *limit) {
+ printf(">======\n");
+ for (; cap->s && (limit == NULL || cap < limit); cap++)
+ printcap(cap);
+ printf("=======\n");
+}
+#endif
+
+/* }====================================================== */
+
+
+/*
+** {======================================================
+** Printing trees (for debugging)
+** =======================================================
+*/
+
+static const char *tagnames[] = {
+ "char", "set", "any",
+ "true", "false",
+ "rep",
+ "seq", "choice",
+ "not", "and",
+ "call", "opencall", "rule", "grammar",
+ "behind",
+ "capture", "run-time"
+};
+
+
+void printtree (TTree *tree, int ident) {
+ int i;
+ for (i = 0; i < ident; i++) printf(" ");
+ printf("%s", tagnames[tree->tag]);
+ switch (tree->tag) {
+ case TChar: {
+ int c = tree->u.n;
+ if (isprint(c))
+ printf(" '%c'\n", c);
+ else
+ printf(" (%02X)\n", c);
+ break;
+ }
+ case TSet: {
+ printcharset(treebuffer(tree));
+ printf("\n");
+ break;
+ }
+ case TOpenCall: case TCall: {
+ printf(" key: %d\n", tree->key);
+ break;
+ }
+ case TBehind: {
+ printf(" %d\n", tree->u.n);
+ printtree(sib1(tree), ident + 2);
+ break;
+ }
+ case TCapture: {
+ printf(" cap: %d key: %d n: %d\n", tree->cap, tree->key, tree->u.n);
+ printtree(sib1(tree), ident + 2);
+ break;
+ }
+ case TRule: {
+ printf(" n: %d key: %d\n", tree->cap, tree->key);
+ printtree(sib1(tree), ident + 2);
+ break; /* do not print next rule as a sibling */
+ }
+ case TGrammar: {
+ TTree *rule = sib1(tree);
+ printf(" %d\n", tree->u.n); /* number of rules */
+ for (i = 0; i < tree->u.n; i++) {
+ printtree(rule, ident + 2);
+ rule = sib2(rule);
+ }
+ assert(rule->tag == TTrue); /* sentinel */
+ break;
+ }
+ default: {
+ int sibs = numsiblings[tree->tag];
+ printf("\n");
+ if (sibs >= 1) {
+ printtree(sib1(tree), ident + 2);
+ if (sibs >= 2)
+ printtree(sib2(tree), ident + 2);
+ }
+ break;
+ }
+ }
+}
+
+
+void printktable (lua_State *L, int idx) {
+ int n, i;
+ lua_getuservalue(L, idx);
+ if (lua_isnil(L, -1)) /* no ktable? */
+ return;
+ n = lua_rawlen(L, -1);
+ printf("[");
+ for (i = 1; i <= n; i++) {
+ printf("%d = ", i);
+ lua_rawgeti(L, -1, i);
+ if (lua_isstring(L, -1))
+ printf("%s ", lua_tostring(L, -1));
+ else
+ printf("%s ", lua_typename(L, lua_type(L, -1)));
+ lua_pop(L, 1);
+ }
+ printf("]\n");
+ /* leave ktable at the stack */
+}
+
+/* }====================================================== */
+
+#endif
diff --git a/lua/src/lpprint.h b/lua/src/lpprint.h
new file mode 100644
index 000000000..632976076
--- /dev/null
+++ b/lua/src/lpprint.h
@@ -0,0 +1,36 @@
+/*
+** $Id: lpprint.h,v 1.2 2015/06/12 18:18:08 roberto Exp $
+*/
+
+
+#if !defined(lpprint_h)
+#define lpprint_h
+
+
+#include "lptree.h"
+#include "lpvm.h"
+
+
+#if defined(LPEG_DEBUG)
+
+void printpatt (Instruction *p, int n);
+void printtree (TTree *tree, int ident);
+void printktable (lua_State *L, int idx);
+void printcharset (const byte *st);
+void printcaplist (Capture *cap, Capture *limit);
+void printinst (const Instruction *op, const Instruction *p);
+
+#else
+
+#define printktable(L,idx) \
+ luaL_error(L, "function only implemented in debug mode")
+#define printtree(tree,i) \
+ luaL_error(L, "function only implemented in debug mode")
+#define printpatt(p,n) \
+ luaL_error(L, "function only implemented in debug mode")
+
+#endif
+
+
+#endif
+
diff --git a/lua/src/lprefix.h b/lua/src/lprefix.h
new file mode 100644
index 000000000..02daa837f
--- /dev/null
+++ b/lua/src/lprefix.h
@@ -0,0 +1,45 @@
+/*
+** $Id: lprefix.h,v 1.2 2014/12/29 16:54:13 roberto Exp $
+** Definitions for Lua code that must come before any other header file
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lprefix_h
+#define lprefix_h
+
+
+/*
+** Allows POSIX/XSI stuff
+*/
+#if !defined(LUA_USE_C89) /* { */
+
+#if !defined(_XOPEN_SOURCE)
+#define _XOPEN_SOURCE 600
+#elif _XOPEN_SOURCE == 0
+#undef _XOPEN_SOURCE /* use -D_XOPEN_SOURCE=0 to undefine it */
+#endif
+
+/*
+** Allows manipulation of large files in gcc and some other compilers
+*/
+#if !defined(LUA_32BITS) && !defined(_FILE_OFFSET_BITS)
+#define _LARGEFILE_SOURCE 1
+#define _FILE_OFFSET_BITS 64
+#endif
+
+#endif /* } */
+
+
+/*
+** Windows stuff
+*/
+#if defined(_WIN32) /* { */
+
+#if !defined(_CRT_SECURE_NO_WARNINGS)
+#define _CRT_SECURE_NO_WARNINGS /* avoid warnings about ISO C functions */
+#endif
+
+#endif /* } */
+
+#endif
+
diff --git a/lua/src/lptree.c b/lua/src/lptree.c
new file mode 100644
index 000000000..ac5f51503
--- /dev/null
+++ b/lua/src/lptree.c
@@ -0,0 +1,1296 @@
+/*
+** $Id: lptree.c,v 1.21 2015/09/28 17:01:25 roberto Exp $
+** Copyright 2013, Lua.org & PUC-Rio (see 'lpeg.html' for license)
+*/
+
+#include <ctype.h>
+#include <limits.h>
+#include <string.h>
+
+
+#include "lua.h"
+#include "lauxlib.h"
+
+#include "lptypes.h"
+#include "lpcap.h"
+#include "lpcode.h"
+#include "lpprint.h"
+#include "lptree.h"
+
+
+/* number of siblings for each tree */
+const byte numsiblings[] = {
+ 0, 0, 0, /* char, set, any */
+ 0, 0, /* true, false */
+ 1, /* rep */
+ 2, 2, /* seq, choice */
+ 1, 1, /* not, and */
+ 0, 0, 2, 1, /* call, opencall, rule, grammar */
+ 1, /* behind */
+ 1, 1 /* capture, runtime capture */
+};
+
+
+static TTree *newgrammar (lua_State *L, int arg);
+
+
+/*
+** returns a reasonable name for value at index 'idx' on the stack
+*/
+static const char *val2str (lua_State *L, int idx) {
+ const char *k = lua_tostring(L, idx);
+ if (k != NULL)
+ return lua_pushfstring(L, "%s", k);
+ else
+ return lua_pushfstring(L, "(a %s)", luaL_typename(L, idx));
+}
+
+
+/*
+** Fix a TOpenCall into a TCall node, using table 'postable' to
+** translate a key to its rule address in the tree. Raises an
+** error if key does not exist.
+*/
+static void fixonecall (lua_State *L, int postable, TTree *g, TTree *t) {
+ int n;
+ lua_rawgeti(L, -1, t->key); /* get rule's name */
+ lua_gettable(L, postable); /* query name in position table */
+ n = lua_tonumber(L, -1); /* get (absolute) position */
+ lua_pop(L, 1); /* remove position */
+ if (n == 0) { /* no position? */
+ lua_rawgeti(L, -1, t->key); /* get rule's name again */
+ luaL_error(L, "rule '%s' undefined in given grammar", val2str(L, -1));
+ }
+ t->tag = TCall;
+ t->u.ps = n - (t - g); /* position relative to node */
+ assert(sib2(t)->tag == TRule);
+ sib2(t)->key = t->key;
+}
+
+
+/*
+** Transform left associative constructions into right
+** associative ones, for sequence and choice; that is:
+** (t11 + t12) + t2 => t11 + (t12 + t2)
+** (t11 * t12) * t2 => t11 * (t12 * t2)
+** (that is, Op (Op t11 t12) t2 => Op t11 (Op t12 t2))
+*/
+static void correctassociativity (TTree *tree) {
+ TTree *t1 = sib1(tree);
+ assert(tree->tag == TChoice || tree->tag == TSeq);
+ while (t1->tag == tree->tag) {
+ int n1size = tree->u.ps - 1; /* t1 == Op t11 t12 */
+ int n11size = t1->u.ps - 1;
+ int n12size = n1size - n11size - 1;
+ memmove(sib1(tree), sib1(t1), n11size * sizeof(TTree)); /* move t11 */
+ tree->u.ps = n11size + 1;
+ sib2(tree)->tag = tree->tag;
+ sib2(tree)->u.ps = n12size + 1;
+ }
+}
+
+
+/*
+** Make final adjustments in a tree. Fix open calls in tree 't',
+** making them refer to their respective rules or raising appropriate
+** errors (if not inside a grammar). Correct associativity of associative
+** constructions (making them right associative). Assume that tree's
+** ktable is at the top of the stack (for error messages).
+*/
+static void finalfix (lua_State *L, int postable, TTree *g, TTree *t) {
+ tailcall:
+ switch (t->tag) {
+ case TGrammar: /* subgrammars were already fixed */
+ return;
+ case TOpenCall: {
+ if (g != NULL) /* inside a grammar? */
+ fixonecall(L, postable, g, t);
+ else { /* open call outside grammar */
+ lua_rawgeti(L, -1, t->key);
+ luaL_error(L, "rule '%s' used outside a grammar", val2str(L, -1));
+ }
+ break;
+ }
+ case TSeq: case TChoice:
+ correctassociativity(t);
+ break;
+ }
+ switch (numsiblings[t->tag]) {
+ case 1: /* finalfix(L, postable, g, sib1(t)); */
+ t = sib1(t); goto tailcall;
+ case 2:
+ finalfix(L, postable, g, sib1(t));
+ t = sib2(t); goto tailcall; /* finalfix(L, postable, g, sib2(t)); */
+ default: assert(numsiblings[t->tag] == 0); break;
+ }
+}
+
+
+
+/*
+** {===================================================================
+** KTable manipulation
+**
+** - The ktable of a pattern 'p' can be shared by other patterns that
+** contain 'p' and no other constants. Because of this sharing, we
+** should not add elements to a 'ktable' unless it was freshly created
+** for the new pattern.
+**
+** - The maximum index in a ktable is USHRT_MAX, because trees and
+** patterns use unsigned shorts to store those indices.
+** ====================================================================
+*/
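+
+/*
+** For example (assuming the standard lpeg API), lpeg.C(1) * lpeg.Cc("x")
+** yields a pattern whose ktable holds the constant "x"; tree nodes refer
+** to such values through their unsigned short 'key' field, which is why
+** the USHRT_MAX limit above applies.
+*/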
+
+/*
+** Create a new 'ktable' for the pattern at the top of the stack.
+*/
+static void newktable (lua_State *L, int n) {
+ lua_createtable(L, n, 0); /* create a fresh table */
+ lua_setuservalue(L, -2); /* set it as 'ktable' for pattern */
+}
+
+
+/*
+** Add element 'idx' to 'ktable' of pattern at the top of the stack;
+** Return index of new element.
+** If new element is nil, does not add it to table (as it would be
+** useless) and returns 0, as ktable[0] is always nil.
+*/
+static int addtoktable (lua_State *L, int idx) {
+ if (lua_isnil(L, idx)) /* nil value? */
+ return 0;
+ else {
+ int n;
+ lua_getuservalue(L, -1); /* get ktable from pattern */
+ n = lua_rawlen(L, -1);
+ if (n >= USHRT_MAX)
+ luaL_error(L, "too many Lua values in pattern");
+ lua_pushvalue(L, idx); /* element to be added */
+ lua_rawseti(L, -2, ++n);
+ lua_pop(L, 1); /* remove 'ktable' */
+ return n;
+ }
+}
+
+
+/*
+** Return the number of elements in the ktable at 'idx'.
+** In Lua 5.2/5.3, the default "environment" for patterns is nil, not
+** a table, so treat it as an empty table. In Lua 5.1, assume that
+** the environment has no numeric indices (len == 0).
+*/
+static int ktablelen (lua_State *L, int idx) {
+ if (!lua_istable(L, idx)) return 0;
+ else return lua_rawlen(L, idx);
+}
+
+
+/*
+** Concatenate the contents of table 'idx1' into table 'idx2'.
+** (Assume that both indices are negative.)
+** Return the original length of table 'idx2' (or 0, if no
+** element was added, as there is no need to correct any index).
+*/
+static int concattable (lua_State *L, int idx1, int idx2) {
+ int i;
+ int n1 = ktablelen(L, idx1);
+ int n2 = ktablelen(L, idx2);
+ if (n1 + n2 > USHRT_MAX)
+ luaL_error(L, "too many Lua values in pattern");
+ if (n1 == 0) return 0; /* nothing to correct */
+ for (i = 1; i <= n1; i++) {
+ lua_rawgeti(L, idx1, i);
+ lua_rawseti(L, idx2 - 1, n2 + i); /* correct 'idx2' */
+ }
+ return n2;
+}
+
+
+/*
+** When joining 'ktables', constants from one of the subpatterns must
+** be renumbered; 'correctkeys' corrects their indices (adding 'n'
+** to each of them)
+*/
+static void correctkeys (TTree *tree, int n) {
+ if (n == 0) return; /* no correction? */
+ tailcall:
+ switch (tree->tag) {
+ case TOpenCall: case TCall: case TRunTime: case TRule: {
+ if (tree->key > 0)
+ tree->key += n;
+ break;
+ }
+ case TCapture: {
+ if (tree->key > 0 && tree->cap != Carg && tree->cap != Cnum)
+ tree->key += n;
+ break;
+ }
+ default: break;
+ }
+ switch (numsiblings[tree->tag]) {
+ case 1: /* correctkeys(sib1(tree), n); */
+ tree = sib1(tree); goto tailcall;
+ case 2:
+ correctkeys(sib1(tree), n);
+ tree = sib2(tree); goto tailcall; /* correctkeys(sib2(tree), n); */
+ default: assert(numsiblings[tree->tag] == 0); break;
+ }
+}
+
+
+/*
+** Join the ktables from p1 and p2 into the ktable for the new pattern
+** at the top of the stack, reusing them when possible.
+*/
+static void joinktables (lua_State *L, int p1, TTree *t2, int p2) {
+ int n1, n2;
+ lua_getuservalue(L, p1); /* get ktables */
+ lua_getuservalue(L, p2);
+ n1 = ktablelen(L, -2);
+ n2 = ktablelen(L, -1);
+ if (n1 == 0 && n2 == 0) /* are both tables empty? */
+ lua_pop(L, 2); /* nothing to be done; pop tables */
+ else if (n2 == 0 || lp_equal(L, -2, -1)) { /* 2nd table empty or equal? */
+ lua_pop(L, 1); /* pop 2nd table */
+ lua_setuservalue(L, -2); /* set 1st ktable into new pattern */
+ }
+ else if (n1 == 0) { /* first table is empty? */
+ lua_setuservalue(L, -3); /* set 2nd table into new pattern */
+ lua_pop(L, 1); /* pop 1st table */
+ }
+ else {
+ lua_createtable(L, n1 + n2, 0); /* create ktable for new pattern */
+ /* stack: new p; ktable p1; ktable p2; new ktable */
+ concattable(L, -3, -1); /* from p1 into new ktable */
+ concattable(L, -2, -1); /* from p2 into new ktable */
+ lua_setuservalue(L, -4); /* new ktable becomes 'p' environment */
+ lua_pop(L, 2); /* pop other ktables */
+ correctkeys(t2, n1); /* correction for indices from p2 */
+ }
+}
+
+
+/*
+** copy 'ktable' of element 'idx' to new tree (on top of stack)
+*/
+static void copyktable (lua_State *L, int idx) {
+ lua_getuservalue(L, idx);
+ lua_setuservalue(L, -2);
+}
+
+
+/*
+** merge 'ktable' from 'stree' at stack index 'idx' into 'ktable'
+** from tree at the top of the stack, and correct corresponding
+** tree.
+*/
+static void mergektable (lua_State *L, int idx, TTree *stree) {
+ int n;
+ lua_getuservalue(L, -1); /* get ktables */
+ lua_getuservalue(L, idx);
+ n = concattable(L, -1, -2);
+ lua_pop(L, 2); /* remove both ktables */
+ correctkeys(stree, n);
+}
+
+
+/*
+** Create a new 'ktable' for the pattern at the top of the stack, adding
+** all elements from pattern 'p' (if not 0) plus element 'idx' to it.
+** Return index of new element.
+*/
+static int addtonewktable (lua_State *L, int p, int idx) {
+ newktable(L, 1);
+ if (p)
+ mergektable(L, p, NULL);
+ return addtoktable(L, idx);
+}
+
+/* }====================================================== */
+
+
+/*
+** {======================================================
+** Tree generation
+** =======================================================
+*/
+
+/*
+** In 5.2, could use 'luaL_testudata'...
+*/
+static int testpattern (lua_State *L, int idx) {
+ if (lua_touserdata(L, idx)) { /* value is a userdata? */
+ if (lua_getmetatable(L, idx)) { /* does it have a metatable? */
+ luaL_getmetatable(L, PATTERN_T);
+ if (lua_rawequal(L, -1, -2)) { /* does it have the correct mt? */
+ lua_pop(L, 2); /* remove both metatables */
+ return 1;
+ }
+ }
+ }
+ return 0;
+}
+
+
+static Pattern *getpattern (lua_State *L, int idx) {
+ return (Pattern *)luaL_checkudata(L, idx, PATTERN_T);
+}
+
+
+static int getsize (lua_State *L, int idx) {
+ return (lua_rawlen(L, idx) - sizeof(Pattern)) / sizeof(TTree) + 1;
+}
+
+
+static TTree *gettree (lua_State *L, int idx, int *len) {
+ Pattern *p = getpattern(L, idx);
+ if (len)
+ *len = getsize(L, idx);
+ return p->tree;
+}
+
+
+/*
+** create a pattern. Set its uservalue (the 'ktable') equal to its
+** metatable. (It could be any empty sequence; the metatable is at
+** hand here, so we use it.)
+*/
+static TTree *newtree (lua_State *L, int len) {
+ size_t size = (len - 1) * sizeof(TTree) + sizeof(Pattern);
+ Pattern *p = (Pattern *)lua_newuserdata(L, size);
+ luaL_getmetatable(L, PATTERN_T);
+ lua_pushvalue(L, -1);
+ lua_setuservalue(L, -3);
+ lua_setmetatable(L, -2);
+ p->code = NULL; p->codesize = 0;
+ return p->tree;
+}
+
+
+static TTree *newleaf (lua_State *L, int tag) {
+ TTree *tree = newtree(L, 1);
+ tree->tag = tag;
+ return tree;
+}
+
+
+static TTree *newcharset (lua_State *L) {
+ TTree *tree = newtree(L, bytes2slots(CHARSETSIZE) + 1);
+ tree->tag = TSet;
+ loopset(i, treebuffer(tree)[i] = 0);
+ return tree;
+}
+
+
+/*
+** add to tree a sequence where first sibling is 'sib' (with size
+** 'sibsize'); returns position for second sibling
+*/
+static TTree *seqaux (TTree *tree, TTree *sib, int sibsize) {
+ tree->tag = TSeq; tree->u.ps = sibsize + 1;
+ memcpy(sib1(tree), sib, sibsize * sizeof(TTree));
+ return sib2(tree);
+}
+
+
+/*
+** Build a sequence of 'n' nodes, each with tag 'tag' and 'u.n' taken
+** from the array 's' (or 0 if array is NULL). (TSeq is binary, so it
+** must build a sequence of sequence of sequence...)
+*/
+static void fillseq (TTree *tree, int tag, int n, const char *s) {
+ int i;
+ for (i = 0; i < n - 1; i++) { /* initial n-1 copies of Seq tag; Seq ... */
+ tree->tag = TSeq; tree->u.ps = 2;
+ sib1(tree)->tag = tag;
+ sib1(tree)->u.n = s ? (byte)s[i] : 0;
+ tree = sib2(tree);
+ }
+ tree->tag = tag; /* last one does not need TSeq */
+ tree->u.n = s ? (byte)s[i] : 0;
+}
+
+
+/*
+** Numbers as patterns:
+** 0 == true (always match); n == TAny repeated 'n' times;
+** -n == not (TAny repeated 'n' times)
+*/
+static TTree *numtree (lua_State *L, int n) {
+ if (n == 0)
+ return newleaf(L, TTrue);
+ else {
+ TTree *tree, *nd;
+ if (n > 0)
+ tree = nd = newtree(L, 2 * n - 1);
+ else { /* negative: code it as !(-n) */
+ n = -n;
+ tree = newtree(L, 2 * n);
+ tree->tag = TNot;
+ nd = sib1(tree);
+ }
+ fillseq(nd, TAny, n, NULL); /* sequence of 'n' any's */
+ return tree;
+ }
+}
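+
+/*
+** Lua-side examples of the encoding above (a usage sketch, not part of
+** this implementation):
+**
+**   local lpeg = require("lpeg")
+**   print(lpeg.P(0):match("hi"))     --> 1    (always matches, consumes nothing)
+**   print(lpeg.P(3):match("hello"))  --> 4    (any three characters)
+**   print(lpeg.P(-1):match("hi"))    --> nil  (a character is still ahead)
+**   print(lpeg.P(-1):match(""))      --> 1    (succeeds only at end of subject)
+*/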
+
+
+/*
+** Convert value at index 'idx' to a pattern
+*/
+static TTree *getpatt (lua_State *L, int idx, int *len) {
+ TTree *tree;
+ switch (lua_type(L, idx)) {
+ case LUA_TSTRING: {
+ size_t slen;
+ const char *s = lua_tolstring(L, idx, &slen); /* get string */
+ if (slen == 0) /* empty? */
+ tree = newleaf(L, TTrue); /* always match */
+ else {
+ tree = newtree(L, 2 * (slen - 1) + 1);
+ fillseq(tree, TChar, slen, s); /* sequence of 'slen' chars */
+ }
+ break;
+ }
+ case LUA_TNUMBER: {
+ int n = lua_tointeger(L, idx);
+ tree = numtree(L, n);
+ break;
+ }
+ case LUA_TBOOLEAN: {
+ tree = (lua_toboolean(L, idx) ? newleaf(L, TTrue) : newleaf(L, TFalse));
+ break;
+ }
+ case LUA_TTABLE: {
+ tree = newgrammar(L, idx);
+ break;
+ }
+ case LUA_TFUNCTION: {
+ tree = newtree(L, 2);
+ tree->tag = TRunTime;
+ tree->key = addtonewktable(L, 0, idx);
+ sib1(tree)->tag = TTrue;
+ break;
+ }
+ default: {
+ return gettree(L, idx, len);
+ }
+ }
+ lua_replace(L, idx); /* put new tree into 'idx' slot */
+ if (len)
+ *len = getsize(L, idx);
+ return tree;
+}
+
+
+/*
+** create a new tree, with a new root and one sibling.
+** Sibling must be on the Lua stack, at index 1.
+*/
+static TTree *newroot1sib (lua_State *L, int tag) {
+ int s1;
+ TTree *tree1 = getpatt(L, 1, &s1);
+ TTree *tree = newtree(L, 1 + s1); /* create new tree */
+ tree->tag = tag;
+ memcpy(sib1(tree), tree1, s1 * sizeof(TTree));
+ copyktable(L, 1);
+ return tree;
+}
+
+
+/*
+** create a new tree, with a new root and 2 siblings.
+** Siblings must be on the Lua stack, first one at index 1.
+*/
+static TTree *newroot2sib (lua_State *L, int tag) {
+ int s1, s2;
+ TTree *tree1 = getpatt(L, 1, &s1);
+ TTree *tree2 = getpatt(L, 2, &s2);
+ TTree *tree = newtree(L, 1 + s1 + s2); /* create new tree */
+ tree->tag = tag;
+ tree->u.ps = 1 + s1;
+ memcpy(sib1(tree), tree1, s1 * sizeof(TTree));
+ memcpy(sib2(tree), tree2, s2 * sizeof(TTree));
+ joinktables(L, 1, sib2(tree), 2);
+ return tree;
+}
+
+
+static int lp_P (lua_State *L) {
+ luaL_checkany(L, 1);
+ getpatt(L, 1, NULL);
+ lua_settop(L, 1);
+ return 1;
+}
+
+
+/*
+** sequence operator; optimizations:
+** false x => false, x true => x, true x => x
+** (cannot do x . false => false because x may have runtime captures)
+*/
+static int lp_seq (lua_State *L) {
+ TTree *tree1 = getpatt(L, 1, NULL);
+ TTree *tree2 = getpatt(L, 2, NULL);
+ if (tree1->tag == TFalse || tree2->tag == TTrue)
+ lua_pushvalue(L, 1); /* false . x == false, x . true = x */
+ else if (tree1->tag == TTrue)
+ lua_pushvalue(L, 2); /* true . x = x */
+ else
+ newroot2sib(L, TSeq);
+ return 1;
+}
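+
+/*
+** Lua-side sketch of these simplifications:
+**
+**   local lpeg = require("lpeg")
+**   local p = lpeg.P"a" * lpeg.P(true)    -- x . true   ==> x
+**   local q = lpeg.P(false) * lpeg.P"a"   -- false . x  ==> false
+**   print(p:match("a"), q:match("a"))     --> 2    nil
+*/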
+
+
+/*
+** choice operator; optimizations:
+** charset / charset => charset
+** true / x => true, x / false => x, false / x => x
+** (x / true is not equivalent to true)
+*/
+static int lp_choice (lua_State *L) {
+ Charset st1, st2;
+ TTree *t1 = getpatt(L, 1, NULL);
+ TTree *t2 = getpatt(L, 2, NULL);
+ if (tocharset(t1, &st1) && tocharset(t2, &st2)) {
+ TTree *t = newcharset(L);
+ loopset(i, treebuffer(t)[i] = st1.cs[i] | st2.cs[i]);
+ }
+ else if (nofail(t1) || t2->tag == TFalse)
+ lua_pushvalue(L, 1); /* true / x => true, x / false => x */
+ else if (t1->tag == TFalse)
+ lua_pushvalue(L, 2); /* false / x => x */
+ else
+ newroot2sib(L, TChoice);
+ return 1;
+}
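+
+/*
+** Lua-side sketch of these simplifications:
+**
+**   local lpeg = require("lpeg")
+**   local letter = lpeg.S"ab" + lpeg.S"cd"   -- two charsets merge into one set
+**   local p = lpeg.P"x" + lpeg.P(false)      -- x / false  ==> x
+**   print(letter:match("c"), p:match("x"))   --> 2    2
+*/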
+
+
+/*
+** p^n
+*/
+static int lp_star (lua_State *L) {
+ int size1;
+ int n = (int)luaL_checkinteger(L, 2);
+ TTree *tree1 = getpatt(L, 1, &size1);
+ if (n >= 0) { /* seq tree1 (seq tree1 ... (seq tree1 (rep tree1))) */
+ TTree *tree = newtree(L, (n + 1) * (size1 + 1));
+ if (nullable(tree1))
+ luaL_error(L, "loop body may accept empty string");
+ while (n--) /* repeat 'n' times */
+ tree = seqaux(tree, tree1, size1);
+ tree->tag = TRep;
+ memcpy(sib1(tree), tree1, size1 * sizeof(TTree));
+ }
+ else { /* choice (seq tree1 ... choice tree1 true ...) true */
+ TTree *tree;
+ n = -n;
+ /* size = (choice + seq + tree1 + true) * n, but the last has no seq */
+ tree = newtree(L, n * (size1 + 3) - 1);
+ for (; n > 1; n--) { /* repeat (n - 1) times */
+ tree->tag = TChoice; tree->u.ps = n * (size1 + 3) - 2;
+ sib2(tree)->tag = TTrue;
+ tree = sib1(tree);
+ tree = seqaux(tree, tree1, size1);
+ }
+ tree->tag = TChoice; tree->u.ps = size1 + 1;
+ sib2(tree)->tag = TTrue;
+ memcpy(sib1(tree), tree1, size1 * sizeof(TTree));
+ }
+ copyktable(L, 1);
+ return 1;
+}
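+
+/*
+** Lua-side sketch of the two cases (n >= 0: at least 'n' repetitions;
+** n < 0: at most '-n' repetitions):
+**
+**   local lpeg = require("lpeg")
+**   local atleast2 = lpeg.P"a"^2    -- two or more 'a's
+**   local atmost2  = lpeg.P"a"^-2   -- zero, one or two 'a's
+**   print(atleast2:match("aaaa"))   --> 5
+**   print(atleast2:match("a"))      --> nil
+**   print(atmost2:match("aaaa"))    --> 3
+*/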
+
+
+/*
+** #p == &p
+*/
+static int lp_and (lua_State *L) {
+ newroot1sib(L, TAnd);
+ return 1;
+}
+
+
+/*
+** -p == !p
+*/
+static int lp_not (lua_State *L) {
+ newroot1sib(L, TNot);
+ return 1;
+}
+
+
+/*
+** [t1 - t2] == Seq (Not t2) t1
+** If t1 and t2 are charsets, make their difference.
+*/
+static int lp_sub (lua_State *L) {
+ Charset st1, st2;
+ int s1, s2;
+ TTree *t1 = getpatt(L, 1, &s1);
+ TTree *t2 = getpatt(L, 2, &s2);
+ if (tocharset(t1, &st1) && tocharset(t2, &st2)) {
+ TTree *t = newcharset(L);
+ loopset(i, treebuffer(t)[i] = st1.cs[i] & ~st2.cs[i]);
+ }
+ else {
+ TTree *tree = newtree(L, 2 + s1 + s2);
+ tree->tag = TSeq; /* sequence of... */
+ tree->u.ps = 2 + s2;
+ sib1(tree)->tag = TNot; /* ...not... */
+ memcpy(sib1(sib1(tree)), t2, s2 * sizeof(TTree)); /* ...t2 */
+ memcpy(sib2(tree), t1, s1 * sizeof(TTree)); /* ... and t1 */
+ joinktables(L, 1, sib1(tree), 2);
+ }
+ return 1;
+}
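+
+/*
+** Lua-side sketch of both cases:
+**
+**   local lpeg = require("lpeg")
+**   local consonant = lpeg.R"az" - lpeg.S"aeiou"      -- charset difference
+**   print(consonant:match("b"), consonant:match("a")) --> 2    nil
+**   local word = lpeg.R"az"^1 - lpeg.P"end"           -- general case: !t2 . t1
+**   print(word:match("foo"), word:match("end"))       --> 4    nil
+*/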
+
+
+static int lp_set (lua_State *L) {
+ size_t l;
+ const char *s = luaL_checklstring(L, 1, &l);
+ TTree *tree = newcharset(L);
+ while (l--) {
+ setchar(treebuffer(tree), (byte)(*s));
+ s++;
+ }
+ return 1;
+}
+
+
+static int lp_range (lua_State *L) {
+ int arg;
+ int top = lua_gettop(L);
+ TTree *tree = newcharset(L);
+ for (arg = 1; arg <= top; arg++) {
+ int c;
+ size_t l;
+ const char *r = luaL_checklstring(L, arg, &l);
+ luaL_argcheck(L, l == 2, arg, "range must have two characters");
+ for (c = (byte)r[0]; c <= (byte)r[1]; c++)
+ setchar(treebuffer(tree), c);
+ }
+ return 1;
+}
+
+
+/*
+** Look-behind predicate
+*/
+static int lp_behind (lua_State *L) {
+ TTree *tree;
+ TTree *tree1 = getpatt(L, 1, NULL);
+ int n = fixedlen(tree1);
+ luaL_argcheck(L, n >= 0, 1, "pattern may not have fixed length");
+ luaL_argcheck(L, !hascaptures(tree1), 1, "pattern has captures");
+ luaL_argcheck(L, n <= MAXBEHIND, 1, "pattern too long to look behind");
+ tree = newroot1sib(L, TBehind);
+ tree->u.n = n;
+ return 1;
+}
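+
+/*
+** Lua-side sketch: lpeg.B(p) matches 'p' ending at the current position
+** and consumes no input:
+**
+**   local lpeg = require("lpeg")
+**   local p = lpeg.P(1) * lpeg.B("a")   -- one character, provided it was an 'a'
+**   print(p:match("ab"), p:match("ba")) --> 2    nil
+*/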
+
+
+/*
+** Create a non-terminal
+*/
+static int lp_V (lua_State *L) {
+ TTree *tree = newleaf(L, TOpenCall);
+ luaL_argcheck(L, !lua_isnoneornil(L, 1), 1, "non-nil value expected");
+ tree->key = addtonewktable(L, 0, 1);
+ return 1;
+}
+
+
+/*
+** Create a tree for a non-empty capture, with a body and
+** optionally with an associated Lua value (at index 'labelidx' in the
+** stack)
+*/
+static int capture_aux (lua_State *L, int cap, int labelidx) {
+ TTree *tree = newroot1sib(L, TCapture);
+ tree->cap = cap;
+ tree->key = (labelidx == 0) ? 0 : addtonewktable(L, 1, labelidx);
+ return 1;
+}
+
+
+/*
+** Fill a tree with an empty capture, using an empty (TTrue) sibling.
+*/
+static TTree *auxemptycap (TTree *tree, int cap) {
+ tree->tag = TCapture;
+ tree->cap = cap;
+ sib1(tree)->tag = TTrue;
+ return tree;
+}
+
+
+/*
+** Create a tree for an empty capture
+*/
+static TTree *newemptycap (lua_State *L, int cap) {
+ return auxemptycap(newtree(L, 2), cap);
+}
+
+
+/*
+** Create a tree for an empty capture with an associated Lua value
+*/
+static TTree *newemptycapkey (lua_State *L, int cap, int idx) {
+ TTree *tree = auxemptycap(newtree(L, 2), cap);
+ tree->key = addtonewktable(L, 0, idx);
+ return tree;
+}
+
+
+/*
+** Captures with syntax p / v
+** (function capture, query capture, string capture, or number capture)
+*/
+static int lp_divcapture (lua_State *L) {
+ switch (lua_type(L, 2)) {
+ case LUA_TFUNCTION: return capture_aux(L, Cfunction, 2);
+ case LUA_TTABLE: return capture_aux(L, Cquery, 2);
+ case LUA_TSTRING: return capture_aux(L, Cstring, 2);
+ case LUA_TNUMBER: {
+ int n = lua_tointeger(L, 2);
+ TTree *tree = newroot1sib(L, TCapture);
+ luaL_argcheck(L, 0 <= n && n <= SHRT_MAX, 1, "invalid number");
+ tree->cap = Cnum;
+ tree->key = n;
+ return 1;
+ }
+ default: return luaL_argerror(L, 2, "invalid replacement value");
+ }
+}
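+
+/*
+** Lua-side sketch of the four forms of 'p / v':
+**
+**   local lpeg = require("lpeg")
+**   local word = lpeg.C(lpeg.R"az"^1)
+**   print((word / string.upper):match("hi"))         --> HI    (function)
+**   print((word / "<%0>"):match("hi"))               --> <hi>  (string)
+**   print((lpeg.C(1) / {a = "A"}):match("a"))        --> A     (query)
+**   print(((lpeg.C(1) * lpeg.C(1)) / 2):match("xy")) --> y     (number: 2nd value)
+*/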
+
+
+static int lp_substcapture (lua_State *L) {
+ return capture_aux(L, Csubst, 0);
+}
+
+
+static int lp_tablecapture (lua_State *L) {
+ return capture_aux(L, Ctable, 0);
+}
+
+
+static int lp_groupcapture (lua_State *L) {
+ if (lua_isnoneornil(L, 2))
+ return capture_aux(L, Cgroup, 0);
+ else
+ return capture_aux(L, Cgroup, 2);
+}
+
+
+static int lp_foldcapture (lua_State *L) {
+ luaL_checktype(L, 2, LUA_TFUNCTION);
+ return capture_aux(L, Cfold, 2);
+}
+
+
+static int lp_simplecapture (lua_State *L) {
+ return capture_aux(L, Csimple, 0);
+}
+
+
+static int lp_poscapture (lua_State *L) {
+ newemptycap(L, Cposition);
+ return 1;
+}
+
+
+static int lp_argcapture (lua_State *L) {
+ int n = (int)luaL_checkinteger(L, 1);
+ TTree *tree = newemptycap(L, Carg);
+ tree->key = n;
+ luaL_argcheck(L, 0 < n && n <= SHRT_MAX, 1, "invalid argument index");
+ return 1;
+}
+
+
+static int lp_backref (lua_State *L) {
+ luaL_checkany(L, 1);
+ newemptycapkey(L, Cbackref, 1);
+ return 1;
+}
+
+
+/*
+** Constant capture
+*/
+static int lp_constcapture (lua_State *L) {
+ int i;
+ int n = lua_gettop(L); /* number of values */
+ if (n == 0) /* no values? */
+ newleaf(L, TTrue); /* no capture */
+ else if (n == 1)
+ newemptycapkey(L, Cconst, 1); /* single constant capture */
+ else { /* create a group capture with all values */
+ TTree *tree = newtree(L, 1 + 3 * (n - 1) + 2);
+ newktable(L, n); /* create a 'ktable' for new tree */
+ tree->tag = TCapture;
+ tree->cap = Cgroup;
+ tree->key = 0;
+ tree = sib1(tree);
+ for (i = 1; i <= n - 1; i++) {
+ tree->tag = TSeq;
+ tree->u.ps = 3; /* skip TCapture and its sibling */
+ auxemptycap(sib1(tree), Cconst);
+ sib1(tree)->key = addtoktable(L, i);
+ tree = sib2(tree);
+ }
+ auxemptycap(tree, Cconst);
+ tree->key = addtoktable(L, i);
+ }
+ return 1;
+}
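+
+/*
+** Lua-side sketch: lpeg.Cc(...) matches the empty string and produces
+** its arguments as capture values:
+**
+**   local lpeg = require("lpeg")
+**   print(lpeg.Cc("one", 2):match(""))            --> one    2
+**   print((lpeg.C(1) * lpeg.Cc("!")):match("x"))  --> x    !
+*/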
+
+
+static int lp_matchtime (lua_State *L) {
+ TTree *tree;
+ luaL_checktype(L, 2, LUA_TFUNCTION);
+ tree = newroot1sib(L, TRunTime);
+ tree->key = addtonewktable(L, 1, 2);
+ return 1;
+}
+
+/* }====================================================== */
+
+
+/*
+** {======================================================
+** Grammar - Tree generation
+** =======================================================
+*/
+
+/*
+** Push onto the stack the key and the pattern for the
+** initial rule of the grammar at index 'arg' in the stack;
+** also add that key (with the rule's position) to the position table.
+*/
+static void getfirstrule (lua_State *L, int arg, int postab) {
+ lua_rawgeti(L, arg, 1); /* access first element */
+ if (lua_isstring(L, -1)) { /* is it the name of initial rule? */
+ lua_pushvalue(L, -1); /* duplicate it to use as key */
+ lua_gettable(L, arg); /* get associated rule */
+ }
+ else {
+ lua_pushinteger(L, 1); /* key for initial rule */
+ lua_insert(L, -2); /* put it before rule */
+ }
+ if (!testpattern(L, -1)) { /* initial rule not a pattern? */
+ if (lua_isnil(L, -1))
+ luaL_error(L, "grammar has no initial rule");
+ else
+ luaL_error(L, "initial rule '%s' is not a pattern", lua_tostring(L, -2));
+ }
+ lua_pushvalue(L, -2); /* push key */
+ lua_pushinteger(L, 1); /* push rule position (after TGrammar) */
+ lua_settable(L, postab); /* insert pair at position table */
+}
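+
+/*
+** Lua-side sketch of the table form handled here (the string at index 1
+** names the initial rule; the other entries define the rules):
+**
+**   local lpeg = require("lpeg")
+**   local V, R, S = lpeg.V, lpeg.R, lpeg.S
+**   local g = lpeg.P{
+**     "Exp",
+**     Exp  = V"Term" * (S"+-" * V"Term")^0,
+**     Term = R"09"^1,
+**   }
+**   print(g:match("1+20-3"))   --> 7
+*/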
+
+/*
+** traverse grammar at index 'arg', pushing all its keys and patterns
+** onto the stack. Create a new table (before all key-pattern pairs) to
+** collect all keys and their associated positions in the final tree
+** (the "position table").
+** Return the number of rules and (in 'totalsize') the total size
+** for the new tree.
+*/
+static int collectrules (lua_State *L, int arg, int *totalsize) {
+ int n = 1; /* to count number of rules */
+ int postab = lua_gettop(L) + 1; /* index of position table */
+ int size; /* accumulator for total size */
+ lua_newtable(L); /* create position table */
+ getfirstrule(L, arg, postab);
+ size = 2 + getsize(L, postab + 2); /* TGrammar + TRule + rule */
+ lua_pushnil(L); /* prepare to traverse grammar table */
+ while (lua_next(L, arg) != 0) {
+ if (lua_tonumber(L, -2) == 1 ||
+ lp_equal(L, -2, postab + 1)) { /* initial rule? */
+ lua_pop(L, 1); /* remove value (keep key for lua_next) */
+ continue;
+ }
+ if (!testpattern(L, -1)) /* value is not a pattern? */
+ luaL_error(L, "rule '%s' is not a pattern", val2str(L, -2));
+ luaL_checkstack(L, LUA_MINSTACK, "grammar has too many rules");
+ lua_pushvalue(L, -2); /* push key (to insert into position table) */
+ lua_pushinteger(L, size);
+ lua_settable(L, postab);
+ size += 1 + getsize(L, -1); /* update size */
+ lua_pushvalue(L, -2); /* push key (for next lua_next) */
+ n++;
+ }
+ *totalsize = size + 1; /* TTrue to finish list of rules */
+ return n;
+}
+
+
+static void buildgrammar (lua_State *L, TTree *grammar, int frule, int n) {
+ int i;
+ TTree *nd = sib1(grammar); /* auxiliary pointer to traverse the tree */
+ for (i = 0; i < n; i++) { /* add each rule into new tree */
+ int ridx = frule + 2*i + 1; /* index of i-th rule */
+ int rulesize;
+ TTree *rn = gettree(L, ridx, &rulesize);
+ nd->tag = TRule;
+ nd->key = 0;
+ nd->cap = i; /* rule number */
+ nd->u.ps = rulesize + 1; /* point to next rule */
+ memcpy(sib1(nd), rn, rulesize * sizeof(TTree)); /* copy rule */
+ mergektable(L, ridx, sib1(nd)); /* merge its ktable into new one */
+ nd = sib2(nd); /* move to next rule */
+ }
+ nd->tag = TTrue; /* finish list of rules */
+}
+
+
+/*
+** Check whether a tree has potential infinite loops
+*/
+static int checkloops (TTree *tree) {
+ tailcall:
+ if (tree->tag == TRep && nullable(sib1(tree)))
+ return 1;
+ else if (tree->tag == TGrammar)
+ return 0; /* sub-grammars already checked */
+ else {
+ switch (numsiblings[tree->tag]) {
+ case 1: /* return checkloops(sib1(tree)); */
+ tree = sib1(tree); goto tailcall;
+ case 2:
+ if (checkloops(sib1(tree))) return 1;
+ /* else return checkloops(sib2(tree)); */
+ tree = sib2(tree); goto tailcall;
+ default: assert(numsiblings[tree->tag] == 0); return 0;
+ }
+ }
+}
+
+
+static int verifyerror (lua_State *L, int *passed, int npassed) {
+ int i, j;
+ for (i = npassed - 1; i >= 0; i--) { /* search for a repetition */
+ for (j = i - 1; j >= 0; j--) {
+ if (passed[i] == passed[j]) {
+ lua_rawgeti(L, -1, passed[i]); /* get rule's key */
+ return luaL_error(L, "rule '%s' may be left recursive", val2str(L, -1));
+ }
+ }
+ }
+ return luaL_error(L, "too many left calls in grammar");
+}
+
+
+/*
+** Check whether a rule can be left recursive; raise an error in that
+** case; otherwise return 1 iff pattern is nullable.
+** The return value is used to check sequences, where the second pattern
+** is only relevant if the first is nullable.
+** Parameter 'nb' works as an accumulator, to allow tail calls in
+** choices. ('nb' true makes the function return true.)
+** Assume ktable at the top of the stack.
+*/
+static int verifyrule (lua_State *L, TTree *tree, int *passed, int npassed,
+ int nb) {
+ tailcall:
+ switch (tree->tag) {
+ case TChar: case TSet: case TAny:
+ case TFalse:
+ return nb; /* cannot pass from here */
+ case TTrue:
+ case TBehind: /* look-behind cannot have calls */
+ return 1;
+ case TNot: case TAnd: case TRep:
+ /* return verifyrule(L, sib1(tree), passed, npassed, 1); */
+ tree = sib1(tree); nb = 1; goto tailcall;
+ case TCapture: case TRunTime:
+ /* return verifyrule(L, sib1(tree), passed, npassed, nb); */
+ tree = sib1(tree); goto tailcall;
+ case TCall:
+ /* return verifyrule(L, sib2(tree), passed, npassed, nb); */
+ tree = sib2(tree); goto tailcall;
+ case TSeq: /* only check 2nd child if first is nb */
+ if (!verifyrule(L, sib1(tree), passed, npassed, 0))
+ return nb;
+ /* else return verifyrule(L, sib2(tree), passed, npassed, nb); */
+ tree = sib2(tree); goto tailcall;
+ case TChoice: /* must check both children */
+ nb = verifyrule(L, sib1(tree), passed, npassed, nb);
+ /* return verifyrule(L, sib2(tree), passed, npassed, nb); */
+ tree = sib2(tree); goto tailcall;
+ case TRule:
+ if (npassed >= MAXRULES)
+ return verifyerror(L, passed, npassed);
+ else {
+ passed[npassed++] = tree->key;
+ /* return verifyrule(L, sib1(tree), passed, npassed, nb); */
+ tree = sib1(tree); goto tailcall;
+ }
+ case TGrammar:
+ return nullable(tree); /* sub-grammar cannot be left recursive */
+ default: assert(0); return 0;
+ }
+}
+
+
+static void verifygrammar (lua_State *L, TTree *grammar) {
+ int passed[MAXRULES];
+ TTree *rule;
+ /* check left-recursive rules */
+ for (rule = sib1(grammar); rule->tag == TRule; rule = sib2(rule)) {
+ if (rule->key == 0) continue; /* unused rule */
+ verifyrule(L, sib1(rule), passed, 0, 0);
+ }
+ assert(rule->tag == TTrue);
+ /* check infinite loops inside rules */
+ for (rule = sib1(grammar); rule->tag == TRule; rule = sib2(rule)) {
+ if (rule->key == 0) continue; /* unused rule */
+ if (checkloops(sib1(rule))) {
+ lua_rawgeti(L, -1, rule->key); /* get rule's key */
+ luaL_error(L, "empty loop in rule '%s'", val2str(L, -1));
+ }
+ }
+ assert(rule->tag == TTrue);
+}
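+
+/*
+** Lua-side sketch of grammars rejected by these checks (both raise
+** errors at construction time, hence they are only shown as comments):
+**
+**   -- lpeg.P{ "S", S = lpeg.V"S" * "a" }
+**   --     --> rule 'S' may be left recursive
+**   -- lpeg.P{ "S", S = lpeg.V"T"^0, T = lpeg.P"a"^0 }
+**   --     --> empty loop in rule 'S'
+*/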
+
+
+/*
+** Give a name for the initial rule if it is not referenced
+*/
+static void initialrulename (lua_State *L, TTree *grammar, int frule) {
+ if (sib1(grammar)->key == 0) { /* initial rule is not referenced? */
+ int n = lua_rawlen(L, -1) + 1; /* index for name */
+ lua_pushvalue(L, frule); /* rule's name */
+ lua_rawseti(L, -2, n); /* ktable was on the top of the stack */
+ sib1(grammar)->key = n;
+ }
+}
+
+
+static TTree *newgrammar (lua_State *L, int arg) {
+ int treesize;
+ int frule = lua_gettop(L) + 2; /* position of first rule's key */
+ int n = collectrules(L, arg, &treesize);
+ TTree *g = newtree(L, treesize);
+ luaL_argcheck(L, n <= MAXRULES, arg, "grammar has too many rules");
+ g->tag = TGrammar; g->u.n = n;
+ lua_newtable(L); /* create 'ktable' */
+ lua_setuservalue(L, -2);
+ buildgrammar(L, g, frule, n);
+ lua_getuservalue(L, -1); /* get 'ktable' for new tree */
+ finalfix(L, frule - 1, g, sib1(g));
+ initialrulename(L, g, frule);
+ verifygrammar(L, g);
+ lua_pop(L, 1); /* remove 'ktable' */
+ lua_insert(L, -(n * 2 + 2)); /* move new table to proper position */
+ lua_pop(L, n * 2 + 1); /* remove position table + rule pairs */
+ return g; /* new table at the top of the stack */
+}
+
+/* }====================================================== */
+
+
+static Instruction *prepcompile (lua_State *L, Pattern *p, int idx) {
+ lua_getuservalue(L, idx); /* push 'ktable' (may be used by 'finalfix') */
+ finalfix(L, 0, NULL, p->tree);
+ lua_pop(L, 1); /* remove 'ktable' */
+ return compile(L, p);
+}
+
+
+static int lp_printtree (lua_State *L) {
+ TTree *tree = getpatt(L, 1, NULL);
+ int c = lua_toboolean(L, 2);
+ if (c) {
+ lua_getuservalue(L, 1); /* push 'ktable' (may be used by 'finalfix') */
+ finalfix(L, 0, NULL, tree);
+ lua_pop(L, 1); /* remove 'ktable' */
+ }
+ printktable(L, 1);
+ printtree(tree, 0);
+ return 0;
+}
+
+
+static int lp_printcode (lua_State *L) {
+ Pattern *p = getpattern(L, 1);
+ printktable(L, 1);
+ if (p->code == NULL) /* not compiled yet? */
+ prepcompile(L, p, 1);
+ printpatt(p->code, p->codesize);
+ return 0;
+}
+
+
+/*
+** Get the initial position for the match, interpreting negative
+** values from the end of the subject
+*/
+static size_t initposition (lua_State *L, size_t len) {
+ lua_Integer ii = luaL_optinteger(L, 3, 1);
+ if (ii > 0) { /* positive index? */
+ if ((size_t)ii <= len) /* inside the string? */
+ return (size_t)ii - 1; /* return it (corrected to 0-base) */
+ else return len; /* crop at the end */
+ }
+ else { /* negative index */
+ if ((size_t)(-ii) <= len) /* inside the string? */
+ return len - ((size_t)(-ii)); /* return position from the end */
+ else return 0; /* crop at the beginning */
+ }
+}
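+
+/*
+** Lua-side sketch of the optional third argument to 'match':
+**
+**   local lpeg = require("lpeg")
+**   print(lpeg.P"b":match("abc", 2))    --> 3    (start at position 2)
+**   print(lpeg.P"c":match("abc", -1))   --> 4    (start at the last character)
+*/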
+
+
+/*
+** Main match function
+*/
+static int lp_match (lua_State *L) {
+ Capture capture[INITCAPSIZE];
+ const char *r;
+ size_t l;
+ Pattern *p = (getpatt(L, 1, NULL), getpattern(L, 1));
+ Instruction *code = (p->code != NULL) ? p->code : prepcompile(L, p, 1);
+ const char *s = luaL_checklstring(L, SUBJIDX, &l);
+ size_t i = initposition(L, l);
+ int ptop = lua_gettop(L);
+ lua_pushnil(L); /* initialize subscache */
+ lua_pushlightuserdata(L, capture); /* initialize caplistidx */
+ lua_getuservalue(L, 1); /* initialize penvidx */
+ r = match(L, s, s + i, s + l, code, capture, ptop);
+ if (r == NULL) {
+ lua_pushnil(L);
+ return 1;
+ }
+ return getcaptures(L, s, r, ptop);
+}
+
+
+
+/*
+** {======================================================
+** Library creation and functions not related to matching
+** =======================================================
+*/
+
+/* maximum limit for stack size */
+#define MAXLIM (INT_MAX / 100)
+
+static int lp_setmax (lua_State *L) {
+ lua_Integer lim = luaL_checkinteger(L, 1);
+ luaL_argcheck(L, 0 < lim && lim <= MAXLIM, 1, "out of range");
+ lua_settop(L, 1);
+ lua_setfield(L, LUA_REGISTRYINDEX, MAXSTACKIDX);
+ return 0;
+}
+
+
+static int lp_version (lua_State *L) {
+ lua_pushstring(L, VERSION);
+ return 1;
+}
+
+
+static int lp_type (lua_State *L) {
+ if (testpattern(L, 1))
+ lua_pushliteral(L, "pattern");
+ else
+ lua_pushnil(L);
+ return 1;
+}
+
+
+int lp_gc (lua_State *L) {
+ Pattern *p = getpattern(L, 1);
+ realloccode(L, p, 0); /* delete code block */
+ return 0;
+}
+
+
+static void createcat (lua_State *L, const char *catname, int (catf) (int)) {
+ TTree *t = newcharset(L);
+ int i;
+ for (i = 0; i <= UCHAR_MAX; i++)
+ if (catf(i)) setchar(treebuffer(t), i);
+ lua_setfield(L, -2, catname);
+}
+
+
+static int lp_locale (lua_State *L) {
+ if (lua_isnoneornil(L, 1)) {
+ lua_settop(L, 0);
+ lua_createtable(L, 0, 12);
+ }
+ else {
+ luaL_checktype(L, 1, LUA_TTABLE);
+ lua_settop(L, 1);
+ }
+ createcat(L, "alnum", isalnum);
+ createcat(L, "alpha", isalpha);
+ createcat(L, "cntrl", iscntrl);
+ createcat(L, "digit", isdigit);
+ createcat(L, "graph", isgraph);
+ createcat(L, "lower", islower);
+ createcat(L, "print", isprint);
+ createcat(L, "punct", ispunct);
+ createcat(L, "space", isspace);
+ createcat(L, "upper", isupper);
+ createcat(L, "xdigit", isxdigit);
+ return 1;
+}
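+
+/*
+** Lua-side sketch:
+**
+**   local lpeg = require("lpeg")
+**   local l = lpeg.locale()
+**   print(l.digit:match("7"), l.space:match(" "), l.alpha:match("7"))
+**   --> 2    2    nil
+*/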
+
+
+static struct luaL_Reg pattreg[] = {
+ {"ptree", lp_printtree},
+ {"pcode", lp_printcode},
+ {"match", lp_match},
+ {"B", lp_behind},
+ {"V", lp_V},
+ {"C", lp_simplecapture},
+ {"Cc", lp_constcapture},
+ {"Cmt", lp_matchtime},
+ {"Cb", lp_backref},
+ {"Carg", lp_argcapture},
+ {"Cp", lp_poscapture},
+ {"Cs", lp_substcapture},
+ {"Ct", lp_tablecapture},
+ {"Cf", lp_foldcapture},
+ {"Cg", lp_groupcapture},
+ {"P", lp_P},
+ {"S", lp_set},
+ {"R", lp_range},
+ {"locale", lp_locale},
+ {"version", lp_version},
+ {"setmaxstack", lp_setmax},
+ {"type", lp_type},
+ {NULL, NULL}
+};
+
+
+static struct luaL_Reg metareg[] = {
+ {"__mul", lp_seq},
+ {"__add", lp_choice},
+ {"__pow", lp_star},
+ {"__gc", lp_gc},
+ {"__len", lp_and},
+ {"__div", lp_divcapture},
+ {"__unm", lp_not},
+ {"__sub", lp_sub},
+ {NULL, NULL}
+};
+
+
+int luaopen_lpeg (lua_State *L);
+int luaopen_lpeg (lua_State *L) {
+ luaL_newmetatable(L, PATTERN_T);
+ lua_pushnumber(L, MAXBACK); /* initialize maximum backtracking */
+ lua_setfield(L, LUA_REGISTRYINDEX, MAXSTACKIDX);
+ luaL_setfuncs(L, metareg, 0);
+ luaL_newlib(L, pattreg);
+ lua_pushvalue(L, -1);
+ lua_setfield(L, -3, "__index");
+ return 1;
+}
+
+/* }====================================================== */
diff --git a/lua/src/lptree.h b/lua/src/lptree.h
new file mode 100644
index 000000000..b69528a6f
--- /dev/null
+++ b/lua/src/lptree.h
@@ -0,0 +1,77 @@
+/*
+** $Id: lptree.h,v 1.2 2013/03/24 13:51:12 roberto Exp $
+*/
+
+#if !defined(lptree_h)
+#define lptree_h
+
+
+#include "lptypes.h"
+
+
+/*
+** types of trees
+*/
+typedef enum TTag {
+ TChar = 0, TSet, TAny, /* standard PEG elements */
+ TTrue, TFalse,
+ TRep,
+ TSeq, TChoice,
+ TNot, TAnd,
+ TCall,
+ TOpenCall,
+ TRule, /* sib1 is rule's pattern, sib2 is 'next' rule */
+ TGrammar, /* sib1 is initial (and first) rule */
+ TBehind, /* match behind */
+ TCapture, /* regular capture */
+ TRunTime /* run-time capture */
+} TTag;
+
+/* number of siblings for each tree */
+extern const byte numsiblings[];
+
+
+/*
+** Tree nodes
+** The first sibling of a tree (if there is one) is immediately after
+** the tree. A reference to a second sibling (ps) is its position
+** relative to the position of the tree itself. A key in ktable
+** is an index to the Lua value associated with the tree node
+** (0 means no associated value).
+*/
+typedef struct TTree {
+ byte tag;
+ byte cap; /* kind of capture (if it is a capture) */
+ unsigned short key; /* key in ktable for Lua data (0 if no key) */
+ union {
+ int ps; /* occasional second sibling */
+ int n; /* occasional counter */
+ } u;
+} TTree;
+
+
+/*
+** A complete pattern has its tree plus, if already compiled,
+** its corresponding code
+*/
+typedef struct Pattern {
+ union Instruction *code;
+ int codesize;
+ TTree tree[1];
+} Pattern;
+
+
+/* number of siblings for each tree */
+extern const byte numsiblings[];
+
+/* access to siblings */
+#define sib1(t) ((t) + 1)
+#define sib2(t) ((t) + (t)->u.ps)
+
+
+
+
+
+
+#endif
+
diff --git a/lua/src/lptypes.h b/lua/src/lptypes.h
new file mode 100644
index 000000000..5eb7987b7
--- /dev/null
+++ b/lua/src/lptypes.h
@@ -0,0 +1,149 @@
+/*
+** $Id: lptypes.h,v 1.14 2015/09/28 17:17:41 roberto Exp $
+** LPeg - PEG pattern matching for Lua
+** Copyright 2007-2015, Lua.org & PUC-Rio (see 'lpeg.html' for license)
+** written by Roberto Ierusalimschy
+*/
+
+#if !defined(lptypes_h)
+#define lptypes_h
+
+
+#if !defined(LPEG_DEBUG)
+#define NDEBUG
+#endif
+
+#include <assert.h>
+#include <limits.h>
+
+#include "lua.h"
+
+
+#define VERSION "1.0.0"
+
+
+#define PATTERN_T "lpeg-pattern"
+#define MAXSTACKIDX "lpeg-maxstack"
+
+
+/*
+** compatibility with Lua 5.1
+*/
+#if (LUA_VERSION_NUM == 501)
+
+#define lp_equal lua_equal
+
+#define lua_getuservalue lua_getfenv
+#define lua_setuservalue lua_setfenv
+
+#define lua_rawlen lua_objlen
+
+#define luaL_setfuncs(L,f,n) luaL_register(L,NULL,f)
+#define luaL_newlib(L,f) luaL_register(L,"lpeg",f)
+
+#endif
+
+
+#if !defined(lp_equal)
+#define lp_equal(L,idx1,idx2) lua_compare(L,(idx1),(idx2),LUA_OPEQ)
+#endif
+
+
+/* default maximum size for call/backtrack stack */
+#if !defined(MAXBACK)
+#define MAXBACK 400
+#endif
+
+
+/* maximum number of rules in a grammar */
+#if !defined(MAXRULES)
+#define MAXRULES 1000
+#endif
+
+
+
+/* initial size for capture's list */
+#define INITCAPSIZE 32
+
+
+/* index, on Lua stack, for subject */
+#define SUBJIDX 2
+
+/* number of fixed arguments to 'match' (before capture arguments) */
+#define FIXEDARGS 3
+
+/* index, on Lua stack, for capture list */
+#define caplistidx(ptop) ((ptop) + 2)
+
+/* index, on Lua stack, for pattern's ktable */
+#define ktableidx(ptop) ((ptop) + 3)
+
+/* index, on Lua stack, for backtracking stack */
+#define stackidx(ptop) ((ptop) + 4)
+
+
+
+typedef unsigned char byte;
+
+
+#define BITSPERCHAR 8
+
+#define CHARSETSIZE ((UCHAR_MAX/BITSPERCHAR) + 1)
+
+
+
+typedef struct Charset {
+ byte cs[CHARSETSIZE];
+} Charset;
+
+
+
+#define loopset(v,b) { int v; for (v = 0; v < CHARSETSIZE; v++) {b;} }
+
+/* access to charset */
+#define treebuffer(t) ((byte *)((t) + 1))
+
+/* number of slots needed for 'n' bytes */
+#define bytes2slots(n) (((n) - 1) / sizeof(TTree) + 1)
+
+/* set 'b' bit in charset 'cs' */
+#define setchar(cs,b) ((cs)[(b) >> 3] |= (1 << ((b) & 7)))
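+
+/*
+** For example, setchar(cs, 65) ('A') sets bit (65 & 7) == 1 of byte
+** cs[65 >> 3] == cs[8]; testchar (below) reads that same bit back.
+*/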
+
+
+/*
+** in capture instructions, 'kind' of capture and its offset are
+** packed in field 'aux', 4 bits for each
+*/
+#define getkind(op) ((op)->i.aux & 0xF)
+#define getoff(op) (((op)->i.aux >> 4) & 0xF)
+#define joinkindoff(k,o) ((k) | ((o) << 4))
+
+#define MAXOFF 0xF
+#define MAXAUX 0xFF
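+
+/*
+** For example, joinkindoff(5, 3) packs to 0x35; getkind then recovers 5
+** from the low nibble and getoff recovers 3 from the high nibble.
+*/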
+
+
+/* maximum number of bytes to look behind */
+#define MAXBEHIND MAXAUX
+
+
+/* maximum size (in elements) for a pattern */
+#define MAXPATTSIZE (SHRT_MAX - 10)
+
+
+/* size (in elements) for an instruction plus extra l bytes */
+#define instsize(l) (((l) + sizeof(Instruction) - 1)/sizeof(Instruction) + 1)
+
+
+/* size (in elements) for an ISet instruction */
+#define CHARSETINSTSIZE instsize(CHARSETSIZE)
+
+/* size (in elements) for an IFunc instruction */
+#define funcinstsize(p) ((p)->i.aux + 2)
+
+
+
+#define testchar(st,c) (((int)(st)[((c) >> 3)] & (1 << ((c) & 7))))
+
+
+#endif
+
diff --git a/lua/src/lpvm.c b/lua/src/lpvm.c
new file mode 100644
index 000000000..eaf2ebfd7
--- /dev/null
+++ b/lua/src/lpvm.c
@@ -0,0 +1,355 @@
+/*
+** $Id: lpvm.c,v 1.6 2015/09/28 17:01:25 roberto Exp $
+** Copyright 2007, Lua.org & PUC-Rio (see 'lpeg.html' for license)
+*/
+
+#include <limits.h>
+#include <string.h>
+
+
+#include "lua.h"
+#include "lauxlib.h"
+
+#include "lpcap.h"
+#include "lptypes.h"
+#include "lpvm.h"
+#include "lpprint.h"
+
+
+/* initial size for call/backtrack stack */
+#if !defined(INITBACK)
+#define INITBACK MAXBACK
+#endif
+
+
+#define getoffset(p) (((p) + 1)->offset)
+
+static const Instruction giveup = {{IGiveup, 0, 0}};
+
+
+/*
+** {======================================================
+** Virtual Machine
+** =======================================================
+*/
+
+
+typedef struct Stack {
+ const char *s; /* saved position (or NULL for calls) */
+ const Instruction *p; /* next instruction */
+ int caplevel;
+} Stack;
+
+
+#define getstackbase(L, ptop) ((Stack *)lua_touserdata(L, stackidx(ptop)))
+
+
+/*
+** Double the size of the array of captures
+*/
+static Capture *doublecap (lua_State *L, Capture *cap, int captop, int ptop) {
+ Capture *newc;
+ if (captop >= INT_MAX/((int)sizeof(Capture) * 2))
+ luaL_error(L, "too many captures");
+ newc = (Capture *)lua_newuserdata(L, captop * 2 * sizeof(Capture));
+ memcpy(newc, cap, captop * sizeof(Capture));
+ lua_replace(L, caplistidx(ptop));
+ return newc;
+}
+
+
+/*
+** Double the size of the stack
+*/
+static Stack *doublestack (lua_State *L, Stack **stacklimit, int ptop) {
+ Stack *stack = getstackbase(L, ptop);
+ Stack *newstack;
+ int n = *stacklimit - stack; /* current stack size */
+ int max, newn;
+ lua_getfield(L, LUA_REGISTRYINDEX, MAXSTACKIDX);
+ max = lua_tointeger(L, -1); /* maximum allowed size */
+ lua_pop(L, 1);
+ if (n >= max) /* already at maximum size? */
+ luaL_error(L, "backtrack stack overflow (current limit is %d)", max);
+ newn = 2 * n; /* new size */
+ if (newn > max) newn = max;
+ newstack = (Stack *)lua_newuserdata(L, newn * sizeof(Stack));
+ memcpy(newstack, stack, n * sizeof(Stack));
+ lua_replace(L, stackidx(ptop));
+ *stacklimit = newstack + newn;
+ return newstack + n; /* return next position */
+}
+
+
+/*
+** Interpret the result of a dynamic capture: false -> fail;
+** true -> keep current position; number -> next position.
+** Return the new subject position. 'fr' is the stack index where the
+** result is; 'curr' is the current subject position; 'limit' is the
+** subject's size.
+*/
+static int resdyncaptures (lua_State *L, int fr, int curr, int limit) {
+ lua_Integer res;
+ if (!lua_toboolean(L, fr)) { /* false value? */
+ lua_settop(L, fr - 1); /* remove results */
+ return -1; /* and fail */
+ }
+ else if (lua_isboolean(L, fr)) /* true? */
+ res = curr; /* keep current position */
+ else {
+ res = lua_tointeger(L, fr) - 1; /* new position */
+ if (res < curr || res > limit)
+ luaL_error(L, "invalid position returned by match-time capture");
+ }
+ lua_remove(L, fr); /* remove first result (offset) */
+ return res;
+}
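+
+/*
+** Lua-side sketch of these outcomes with a match-time capture:
+**
+**   local lpeg = require("lpeg")
+**   local digits = lpeg.C(lpeg.R"09"^1)
+**   local even = lpeg.Cmt(digits, function (s, i, d)
+**     local n = tonumber(d)
+**     if n % 2 == 0 then return i, n    -- position: accept, capture 'n'
+**     else return false end             -- false: reject the match
+**   end)
+**   print(even:match("42"), even:match("7"))   --> 42    nil
+*/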
+
+
+/*
+** Add capture values returned by a dynamic capture to the capture list
+** 'base', nested inside a group capture. 'fd' indexes the first capture
+** value, 'n' is the number of values (at least 1).
+*/
+static void adddyncaptures (const char *s, Capture *base, int n, int fd) {
+ int i;
+ /* Cgroup capture is already there */
+ assert(base[0].kind == Cgroup && base[0].siz == 0);
+ base[0].idx = 0; /* make it an anonymous group */
+ for (i = 1; i <= n; i++) { /* add runtime captures */
+ base[i].kind = Cruntime;
+ base[i].siz = 1; /* mark it as closed */
+ base[i].idx = fd + i - 1; /* stack index of capture value */
+ base[i].s = s;
+ }
+ base[i].kind = Cclose; /* close group */
+ base[i].siz = 1;
+ base[i].s = s;
+}
+
+
+/*
+** Remove dynamic captures from the Lua stack (called in case of failure)
+*/
+static int removedyncap (lua_State *L, Capture *capture,
+ int level, int last) {
+ int id = finddyncap(capture + level, capture + last); /* index of 1st cap. */
+ int top = lua_gettop(L);
+ if (id == 0) return 0; /* no dynamic captures? */
+ lua_settop(L, id - 1); /* remove captures */
+ return top - id + 1; /* number of values removed */
+}
+
+
+/*
+** Opcode interpreter
+*/
+const char *match (lua_State *L, const char *o, const char *s, const char *e,
+ Instruction *op, Capture *capture, int ptop) {
+ Stack stackbase[INITBACK];
+ Stack *stacklimit = stackbase + INITBACK;
+ Stack *stack = stackbase; /* point to first empty slot in stack */
+ int capsize = INITCAPSIZE;
+ int captop = 0; /* point to first empty slot in captures */
+ int ndyncap = 0; /* number of dynamic captures (in Lua stack) */
+ const Instruction *p = op; /* current instruction */
+ stack->p = &giveup; stack->s = s; stack->caplevel = 0; stack++;
+ lua_pushlightuserdata(L, stackbase);
+ for (;;) {
+#if defined(DEBUG)
+ printf("s: |%s| stck:%d, dyncaps:%d, caps:%d ",
+ s, stack - getstackbase(L, ptop), ndyncap, captop);
+ printinst(op, p);
+ printcaplist(capture, capture + captop);
+#endif
+ assert(stackidx(ptop) + ndyncap == lua_gettop(L) && ndyncap <= captop);
+ switch ((Opcode)p->i.code) {
+ case IEnd: {
+ assert(stack == getstackbase(L, ptop) + 1);
+ capture[captop].kind = Cclose;
+ capture[captop].s = NULL;
+ return s;
+ }
+ case IGiveup: {
+ assert(stack == getstackbase(L, ptop));
+ return NULL;
+ }
+ case IRet: {
+ assert(stack > getstackbase(L, ptop) && (stack - 1)->s == NULL);
+ p = (--stack)->p;
+ continue;
+ }
+ case IAny: {
+ if (s < e) { p++; s++; }
+ else goto fail;
+ continue;
+ }
+ case ITestAny: {
+ if (s < e) p += 2;
+ else p += getoffset(p);
+ continue;
+ }
+ case IChar: {
+ if ((byte)*s == p->i.aux && s < e) { p++; s++; }
+ else goto fail;
+ continue;
+ }
+ case ITestChar: {
+ if ((byte)*s == p->i.aux && s < e) p += 2;
+ else p += getoffset(p);
+ continue;
+ }
+ case ISet: {
+ int c = (byte)*s;
+ if (testchar((p+1)->buff, c) && s < e)
+ { p += CHARSETINSTSIZE; s++; }
+ else goto fail;
+ continue;
+ }
+ case ITestSet: {
+ int c = (byte)*s;
+ if (testchar((p + 2)->buff, c) && s < e)
+ p += 1 + CHARSETINSTSIZE;
+ else p += getoffset(p);
+ continue;
+ }
+ case IBehind: {
+ int n = p->i.aux;
+ if (n > s - o) goto fail;
+ s -= n; p++;
+ continue;
+ }
+ case ISpan: {
+ for (; s < e; s++) {
+ int c = (byte)*s;
+ if (!testchar((p+1)->buff, c)) break;
+ }
+ p += CHARSETINSTSIZE;
+ continue;
+ }
+ case IJmp: {
+ p += getoffset(p);
+ continue;
+ }
+ case IChoice: {
+ if (stack == stacklimit)
+ stack = doublestack(L, &stacklimit, ptop);
+ stack->p = p + getoffset(p);
+ stack->s = s;
+ stack->caplevel = captop;
+ stack++;
+ p += 2;
+ continue;
+ }
+ case ICall: {
+ if (stack == stacklimit)
+ stack = doublestack(L, &stacklimit, ptop);
+ stack->s = NULL;
+ stack->p = p + 2; /* save return address */
+ stack++;
+ p += getoffset(p);
+ continue;
+ }
+ case ICommit: {
+ assert(stack > getstackbase(L, ptop) && (stack - 1)->s != NULL);
+ stack--;
+ p += getoffset(p);
+ continue;
+ }
+ case IPartialCommit: {
+ assert(stack > getstackbase(L, ptop) && (stack - 1)->s != NULL);
+ (stack - 1)->s = s;
+ (stack - 1)->caplevel = captop;
+ p += getoffset(p);
+ continue;
+ }
+ case IBackCommit: {
+ assert(stack > getstackbase(L, ptop) && (stack - 1)->s != NULL);
+ s = (--stack)->s;
+ captop = stack->caplevel;
+ p += getoffset(p);
+ continue;
+ }
+ case IFailTwice:
+ assert(stack > getstackbase(L, ptop));
+ stack--;
+ /* go through */
+ case IFail:
+ fail: { /* pattern failed: try to backtrack */
+ do { /* remove pending calls */
+ assert(stack > getstackbase(L, ptop));
+ s = (--stack)->s;
+ } while (s == NULL);
+ if (ndyncap > 0) /* are there match-time captures? */
+ ndyncap -= removedyncap(L, capture, stack->caplevel, captop);
+ captop = stack->caplevel;
+ p = stack->p;
+ continue;
+ }
+ case ICloseRunTime: {
+ CapState cs;
+ int rem, res, n;
+ int fr = lua_gettop(L) + 1; /* stack index of first result */
+ cs.s = o; cs.L = L; cs.ocap = capture; cs.ptop = ptop;
+ n = runtimecap(&cs, capture + captop, s, &rem); /* call function */
+ captop -= n; /* remove nested captures */
+ fr -= rem; /* 'rem' items were popped from Lua stack */
+ res = resdyncaptures(L, fr, s - o, e - o); /* get result */
+ if (res == -1) /* fail? */
+ goto fail;
+ s = o + res; /* else update current position */
+ n = lua_gettop(L) - fr + 1; /* number of new captures */
+ ndyncap += n - rem; /* update number of dynamic captures */
+ if (n > 0) { /* any new capture? */
+ if ((captop += n + 2) >= capsize) {
+ capture = doublecap(L, capture, captop, ptop);
+ capsize = 2 * captop;
+ }
+ /* add new captures to 'capture' list */
+ adddyncaptures(s, capture + captop - n - 2, n, fr);
+ }
+ p++;
+ continue;
+ }
+ case ICloseCapture: {
+ const char *s1 = s;
+ assert(captop > 0);
+ /* if possible, turn capture into a full capture */
+ if (capture[captop - 1].siz == 0 &&
+ s1 - capture[captop - 1].s < UCHAR_MAX) {
+ capture[captop - 1].siz = s1 - capture[captop - 1].s + 1;
+ p++;
+ continue;
+ }
+ else {
+ capture[captop].siz = 1; /* mark entry as closed */
+ capture[captop].s = s;
+ goto pushcapture;
+ }
+ }
+ case IOpenCapture:
+ capture[captop].siz = 0; /* mark entry as open */
+ capture[captop].s = s;
+ goto pushcapture;
+ case IFullCapture:
+ capture[captop].siz = getoff(p) + 1; /* save capture size */
+ capture[captop].s = s - getoff(p);
+ /* goto pushcapture; */
+ pushcapture: {
+ capture[captop].idx = p->i.key;
+ capture[captop].kind = getkind(p);
+ if (++captop >= capsize) {
+ capture = doublecap(L, capture, captop, ptop);
+ capsize = 2 * captop;
+ }
+ p++;
+ continue;
+ }
+ default: assert(0); return NULL;
+ }
+ }
+}
+
+/* }====================================================== */
+
+
diff --git a/lua/src/lpvm.h b/lua/src/lpvm.h
new file mode 100644
index 000000000..757b9e135
--- /dev/null
+++ b/lua/src/lpvm.h
@@ -0,0 +1,58 @@
+/*
+** $Id: lpvm.h,v 1.3 2014/02/21 13:06:41 roberto Exp $
+*/
+
+#if !defined(lpvm_h)
+#define lpvm_h
+
+#include "lpcap.h"
+
+
+/* Virtual Machine's instructions */
+typedef enum Opcode {
+ IAny, /* if no char, fail */
+ IChar, /* if char != aux, fail */
+ ISet, /* if char not in buff, fail */
+ ITestAny, /* if no char, jump to 'offset' */
+ ITestChar, /* if char != aux, jump to 'offset' */
+ ITestSet, /* if char not in buff, jump to 'offset' */
+ ISpan, /* read a span of chars in buff */
+ IBehind, /* walk back 'aux' characters (fail if not possible) */
+ IRet, /* return from a rule */
+ IEnd, /* end of pattern */
+ IChoice, /* stack a choice; next fail will jump to 'offset' */
+ IJmp, /* jump to 'offset' */
+ ICall, /* call rule at 'offset' */
+ IOpenCall, /* call rule number 'key' (must be closed to an ICall) */
+ ICommit, /* pop choice and jump to 'offset' */
+ IPartialCommit, /* update top choice to current position and jump */
+ IBackCommit, /* "fails" but jump to its own 'offset' */
+ IFailTwice, /* pop one choice and then fail */
+ IFail, /* go back to saved state on choice and jump to saved offset */
+ IGiveup, /* internal use */
+ IFullCapture, /* complete capture of last 'off' chars */
+ IOpenCapture, /* start a capture */
+ ICloseCapture,
+ ICloseRunTime
+} Opcode;
+
+
+
+typedef union Instruction {
+ struct Inst {
+ byte code;
+ byte aux;
+ short key;
+ } i;
+ int offset;
+ byte buff[1];
+} Instruction;
+
+
+void printpatt (Instruction *p, int n);
+const char *match (lua_State *L, const char *o, const char *s, const char *e,
+ Instruction *op, Capture *capture, int ptop);
+
+
+#endif
+
diff --git a/lua/src/lstate.c b/lua/src/lstate.c
new file mode 100644
index 000000000..9194ac341
--- /dev/null
+++ b/lua/src/lstate.c
@@ -0,0 +1,347 @@
+/*
+** $Id: lstate.c,v 2.133 2015/11/13 12:16:51 roberto Exp $
+** Global State
+** See Copyright Notice in lua.h
+*/
+
+#define lstate_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <stddef.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lapi.h"
+#include "ldebug.h"
+#include "ldo.h"
+#include "lfunc.h"
+#include "lgc.h"
+#include "llex.h"
+#include "lmem.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "ltm.h"
+
+
+#if !defined(LUAI_GCPAUSE)
+#define LUAI_GCPAUSE 200 /* 200% */
+#endif
+
+#if !defined(LUAI_GCMUL)
+#define LUAI_GCMUL 200 /* GC runs 'twice the speed' of memory allocation */
+#endif
+
+
+/*
+** a macro to help the creation of a unique random seed when a state is
+** created; the seed is used to randomize hashes.
+*/
+#if !defined(luai_makeseed)
+#include <time.h>
+#define luai_makeseed() cast(unsigned int, time(NULL))
+#endif
+
+
+
+/*
+** thread state + extra space
+*/
+typedef struct LX {
+ lu_byte extra_[LUA_EXTRASPACE];
+ lua_State l;
+} LX;
+
+
+/*
+** Main thread combines a thread state and the global state
+*/
+typedef struct LG {
+ LX l;
+ global_State g;
+} LG;
+
+
+
+#define fromstate(L) (cast(LX *, cast(lu_byte *, (L)) - offsetof(LX, l)))
+
+
+/*
+** Compute an initial seed as random as possible. Rely on Address Space
+** Layout Randomization (if present) to increase randomness.
+*/
+#define addbuff(b,p,e) \
+ { size_t t = cast(size_t, e); \
+ memcpy(b + p, &t, sizeof(t)); p += sizeof(t); }
+
+static unsigned int makeseed (lua_State *L) {
+ char buff[4 * sizeof(size_t)];
+ unsigned int h = luai_makeseed();
+ int p = 0;
+ addbuff(buff, p, L); /* heap variable */
+ addbuff(buff, p, &h); /* local variable */
+ addbuff(buff, p, luaO_nilobject); /* global variable */
+ addbuff(buff, p, &lua_newstate); /* public function */
+ lua_assert(p == sizeof(buff));
+ return luaS_hash(buff, p, h);
+}
+
+
+/*
+** set GCdebt to a new value keeping the value (totalbytes + GCdebt)
+** invariant (and avoiding underflows in 'totalbytes')
+*/
+void luaE_setdebt (global_State *g, l_mem debt) {
+ l_mem tb = gettotalbytes(g);
+ lua_assert(tb > 0);
+ if (debt < tb - MAX_LMEM)
+ debt = tb - MAX_LMEM; /* will make 'totalbytes == MAX_LMEM' */
+ g->totalbytes = tb - debt;
+ g->GCdebt = debt;
+}
+
+
+CallInfo *luaE_extendCI (lua_State *L) {
+ CallInfo *ci = luaM_new(L, CallInfo);
+ lua_assert(L->ci->next == NULL);
+ L->ci->next = ci;
+ ci->previous = L->ci;
+ ci->next = NULL;
+ L->nci++;
+ return ci;
+}
+
+
+/*
+** free all CallInfo structures not in use by a thread
+*/
+void luaE_freeCI (lua_State *L) {
+ CallInfo *ci = L->ci;
+ CallInfo *next = ci->next;
+ ci->next = NULL;
+ while ((ci = next) != NULL) {
+ next = ci->next;
+ luaM_free(L, ci);
+ L->nci--;
+ }
+}
+
+
+/*
+** free half of the CallInfo structures not in use by a thread
+*/
+void luaE_shrinkCI (lua_State *L) {
+ CallInfo *ci = L->ci;
+ CallInfo *next2; /* next's next */
+ /* while there are two nexts */
+ while (ci->next != NULL && (next2 = ci->next->next) != NULL) {
+ luaM_free(L, ci->next); /* free next */
+ L->nci--;
+ ci->next = next2; /* remove 'next' from the list */
+ next2->previous = ci;
+ ci = next2; /* keep next's next */
+ }
+}
+
+
+static void stack_init (lua_State *L1, lua_State *L) {
+ int i; CallInfo *ci;
+ /* initialize stack array */
+ L1->stack = luaM_newvector(L, BASIC_STACK_SIZE, TValue);
+ L1->stacksize = BASIC_STACK_SIZE;
+ for (i = 0; i < BASIC_STACK_SIZE; i++)
+ setnilvalue(L1->stack + i); /* erase new stack */
+ L1->top = L1->stack;
+ L1->stack_last = L1->stack + L1->stacksize - EXTRA_STACK;
+ /* initialize first ci */
+ ci = &L1->base_ci;
+ ci->next = ci->previous = NULL;
+ ci->callstatus = 0;
+ ci->func = L1->top;
+ setnilvalue(L1->top++); /* 'function' entry for this 'ci' */
+ ci->top = L1->top + LUA_MINSTACK;
+ L1->ci = ci;
+}
+
+
+static void freestack (lua_State *L) {
+ if (L->stack == NULL)
+ return; /* stack not completely built yet */
+ L->ci = &L->base_ci; /* free the entire 'ci' list */
+ luaE_freeCI(L);
+ lua_assert(L->nci == 0);
+ luaM_freearray(L, L->stack, L->stacksize); /* free stack array */
+}
+
+
+/*
+** Create registry table and its predefined values
+*/
+static void init_registry (lua_State *L, global_State *g) {
+ TValue temp;
+ /* create registry */
+ Table *registry = luaH_new(L);
+ sethvalue(L, &g->l_registry, registry);
+ luaH_resize(L, registry, LUA_RIDX_LAST, 0);
+ /* registry[LUA_RIDX_MAINTHREAD] = L */
+ setthvalue(L, &temp, L); /* temp = L */
+ luaH_setint(L, registry, LUA_RIDX_MAINTHREAD, &temp);
+ /* registry[LUA_RIDX_GLOBALS] = table of globals */
+ sethvalue(L, &temp, luaH_new(L)); /* temp = new table (global table) */
+ luaH_setint(L, registry, LUA_RIDX_GLOBALS, &temp);
+}
+
+
+/*
+** open parts of the state that may cause memory-allocation errors.
+** ('g->version' != NULL flags that the state was completely built)
+*/
+static void f_luaopen (lua_State *L, void *ud) {
+ global_State *g = G(L);
+ UNUSED(ud);
+ stack_init(L, L); /* init stack */
+ init_registry(L, g);
+ luaS_init(L);
+ luaT_init(L);
+ luaX_init(L);
+ g->gcrunning = 1; /* allow gc */
+ g->version = lua_version(NULL);
+ luai_userstateopen(L);
+}
+
+
+/*
+** preinitialize a thread with consistent values without allocating
+** any memory (to avoid errors)
+*/
+static void preinit_thread (lua_State *L, global_State *g) {
+ G(L) = g;
+ L->stack = NULL;
+ L->ci = NULL;
+ L->nci = 0;
+ L->stacksize = 0;
+ L->twups = L; /* thread has no upvalues */
+ L->errorJmp = NULL;
+ L->nCcalls = 0;
+ L->hook = NULL;
+ L->hookmask = 0;
+ L->basehookcount = 0;
+ L->allowhook = 1;
+ resethookcount(L);
+ L->openupval = NULL;
+ L->nny = 1;
+ L->status = LUA_OK;
+ L->errfunc = 0;
+}
+
+
+static void close_state (lua_State *L) {
+ global_State *g = G(L);
+ luaF_close(L, L->stack); /* close all upvalues for this thread */
+ luaC_freeallobjects(L); /* collect all objects */
+ if (g->version) /* closing a fully built state? */
+ luai_userstateclose(L);
+ luaM_freearray(L, G(L)->strt.hash, G(L)->strt.size);
+ freestack(L);
+ lua_assert(gettotalbytes(g) == sizeof(LG));
+ (*g->frealloc)(g->ud, fromstate(L), sizeof(LG), 0); /* free main block */
+}
+
+
+LUA_API lua_State *lua_newthread (lua_State *L) {
+ global_State *g = G(L);
+ lua_State *L1;
+ lua_lock(L);
+ luaC_checkGC(L);
+ /* create new thread */
+ L1 = &cast(LX *, luaM_newobject(L, LUA_TTHREAD, sizeof(LX)))->l;
+ L1->marked = luaC_white(g);
+ L1->tt = LUA_TTHREAD;
+ /* link it on list 'allgc' */
+ L1->next = g->allgc;
+ g->allgc = obj2gco(L1);
+ /* anchor it on L stack */
+ setthvalue(L, L->top, L1);
+ api_incr_top(L);
+ preinit_thread(L1, g);
+ L1->hookmask = L->hookmask;
+ L1->basehookcount = L->basehookcount;
+ L1->hook = L->hook;
+ resethookcount(L1);
+ /* initialize L1 extra space */
+ memcpy(lua_getextraspace(L1), lua_getextraspace(g->mainthread),
+ LUA_EXTRASPACE);
+ luai_userstatethread(L, L1);
+ stack_init(L1, L); /* init stack */
+ lua_unlock(L);
+ return L1;
+}
+
+
+void luaE_freethread (lua_State *L, lua_State *L1) {
+ LX *l = fromstate(L1);
+ luaF_close(L1, L1->stack); /* close all upvalues for this thread */
+ lua_assert(L1->openupval == NULL);
+ luai_userstatefree(L, L1);
+ freestack(L1);
+ luaM_free(L, l);
+}
+
+
+LUA_API lua_State *lua_newstate (lua_Alloc f, void *ud) {
+ int i;
+ lua_State *L;
+ global_State *g;
+ LG *l = cast(LG *, (*f)(ud, NULL, LUA_TTHREAD, sizeof(LG)));
+ if (l == NULL) return NULL;
+ L = &l->l.l;
+ g = &l->g;
+ L->next = NULL;
+ L->tt = LUA_TTHREAD;
+ g->currentwhite = bitmask(WHITE0BIT);
+ L->marked = luaC_white(g);
+ preinit_thread(L, g);
+ g->frealloc = f;
+ g->ud = ud;
+ g->mainthread = L;
+ g->seed = makeseed(L);
+ g->gcrunning = 0; /* no GC while building state */
+ g->GCestimate = 0;
+ g->strt.size = g->strt.nuse = 0;
+ g->strt.hash = NULL;
+ setnilvalue(&g->l_registry);
+ g->panic = NULL;
+ g->version = NULL;
+ g->gcstate = GCSpause;
+ g->gckind = KGC_NORMAL;
+ g->allgc = g->finobj = g->tobefnz = g->fixedgc = NULL;
+ g->sweepgc = NULL;
+ g->gray = g->grayagain = NULL;
+ g->weak = g->ephemeron = g->allweak = NULL;
+ g->twups = NULL;
+ g->totalbytes = sizeof(LG);
+ g->GCdebt = 0;
+ g->gcfinnum = 0;
+ g->gcpause = LUAI_GCPAUSE;
+ g->gcstepmul = LUAI_GCMUL;
+ for (i=0; i < LUA_NUMTAGS; i++) g->mt[i] = NULL;
+ if (luaD_rawrunprotected(L, f_luaopen, NULL) != LUA_OK) {
+ /* memory allocation error: free partial state */
+ close_state(L);
+ L = NULL;
+ }
+ return L;
+}
+
+
+LUA_API void lua_close (lua_State *L) {
+ L = G(L)->mainthread; /* only the main thread can be closed */
+ lua_lock(L);
+ close_state(L);
+}
+
+
diff --git a/lua/src/lstate.h b/lua/src/lstate.h
new file mode 100644
index 000000000..a469466c4
--- /dev/null
+++ b/lua/src/lstate.h
@@ -0,0 +1,235 @@
+/*
+** $Id: lstate.h,v 2.133 2016/12/22 13:08:50 roberto Exp $
+** Global State
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lstate_h
+#define lstate_h
+
+#include "lua.h"
+
+#include "lobject.h"
+#include "ltm.h"
+#include "lzio.h"
+
+
+/*
+
+** Some notes about garbage-collected objects: All objects in Lua must
+** be kept somehow accessible until being freed, so all objects always
+** belong to one (and only one) of these lists, using field 'next' of
+** the 'CommonHeader' for the link:
+**
+** 'allgc': all objects not marked for finalization;
+** 'finobj': all objects marked for finalization;
+** 'tobefnz': all objects ready to be finalized;
+** 'fixedgc': all objects that are not to be collected (currently
+** only small strings, such as reserved words).
+
+*/
+
+
+struct lua_longjmp; /* defined in ldo.c */
+
+
+/*
+** Atomic type (relative to signals) to better ensure that 'lua_sethook'
+** is thread safe
+*/
+#if !defined(l_signalT)
+#include <signal.h>
+#define l_signalT sig_atomic_t
+#endif
+
+
+/* extra stack space to handle TM calls and some other extras */
+#define EXTRA_STACK 5
+
+
+#define BASIC_STACK_SIZE (2*LUA_MINSTACK)
+
+
+/* kinds of Garbage Collection */
+#define KGC_NORMAL 0
+#define KGC_EMERGENCY 1 /* gc was forced by an allocation failure */
+
+
+typedef struct stringtable {
+ TString **hash;
+ int nuse; /* number of elements */
+ int size;
+} stringtable;
+
+
+/*
+** Information about a call.
+** When a thread yields, 'func' is adjusted to pretend that the
+** top function has only the yielded values in its stack; in that
+** case, the actual 'func' value is saved in field 'extra'.
+** When a function calls another with a continuation, 'extra' keeps
+** the function index so that, in case of errors, the continuation
+** function can be called with the correct top.
+*/
+typedef struct CallInfo {
+ StkId func; /* function index in the stack */
+ StkId top; /* top for this function */
+ struct CallInfo *previous, *next; /* dynamic call link */
+ union {
+ struct { /* only for Lua functions */
+ StkId base; /* base for this function */
+ const Instruction *savedpc;
+ } l;
+ struct { /* only for C functions */
+ lua_KFunction k; /* continuation in case of yields */
+ ptrdiff_t old_errfunc;
+ lua_KContext ctx; /* context info. in case of yields */
+ } c;
+ } u;
+ ptrdiff_t extra;
+ short nresults; /* expected number of results from this function */
+ unsigned short callstatus;
+} CallInfo;
+
+
+/*
+** Bits in CallInfo status
+*/
+#define CIST_OAH (1<<0) /* original value of 'allowhook' */
+#define CIST_LUA (1<<1) /* call is running a Lua function */
+#define CIST_HOOKED (1<<2) /* call is running a debug hook */
+#define CIST_FRESH (1<<3) /* call is running on a fresh invocation
+ of luaV_execute */
+#define CIST_YPCALL (1<<4) /* call is a yieldable protected call */
+#define CIST_TAIL (1<<5) /* call was tail called */
+#define CIST_HOOKYIELD (1<<6) /* last hook called yielded */
+#define CIST_LEQ (1<<7) /* using __lt for __le */
+#define CIST_FIN (1<<8) /* call is running a finalizer */
+
+#define isLua(ci) ((ci)->callstatus & CIST_LUA)
+
+/* assume that CIST_OAH has offset 0 and that 'v' is strictly 0/1 */
+#define setoah(st,v) ((st) = ((st) & ~CIST_OAH) | (v))
+#define getoah(st) ((st) & CIST_OAH)
+
+
+/*
+** 'global state', shared by all threads of this state
+*/
+typedef struct global_State {
+ lua_Alloc frealloc; /* function to reallocate memory */
+ void *ud; /* auxiliary data to 'frealloc' */
+ l_mem totalbytes; /* number of bytes currently allocated - GCdebt */
+ l_mem GCdebt; /* bytes allocated not yet compensated by the collector */
+ lu_mem GCmemtrav; /* memory traversed by the GC */
+ lu_mem GCestimate; /* an estimate of the non-garbage memory in use */
+ stringtable strt; /* hash table for strings */
+ TValue l_registry;
+ unsigned int seed; /* randomized seed for hashes */
+ lu_byte currentwhite;
+ lu_byte gcstate; /* state of garbage collector */
+ lu_byte gckind; /* kind of GC running */
+ lu_byte gcrunning; /* true if GC is running */
+ GCObject *allgc; /* list of all collectable objects */
+ GCObject **sweepgc; /* current position of sweep in list */
+ GCObject *finobj; /* list of collectable objects with finalizers */
+ GCObject *gray; /* list of gray objects */
+ GCObject *grayagain; /* list of objects to be traversed atomically */
+ GCObject *weak; /* list of tables with weak values */
+ GCObject *ephemeron; /* list of ephemeron tables (weak keys) */
+ GCObject *allweak; /* list of all-weak tables */
+ GCObject *tobefnz; /* list of userdata to be GC */
+ GCObject *fixedgc; /* list of objects not to be collected */
+ struct lua_State *twups; /* list of threads with open upvalues */
+ unsigned int gcfinnum; /* number of finalizers to call in each GC step */
+ int gcpause; /* size of pause between successive GCs */
+ int gcstepmul; /* GC 'granularity' */
+ lua_CFunction panic; /* to be called in unprotected errors */
+ struct lua_State *mainthread;
+ const lua_Number *version; /* pointer to version number */
+ TString *memerrmsg; /* memory-error message */
+ TString *tmname[TM_N]; /* array with tag-method names */
+ struct Table *mt[LUA_NUMTAGS]; /* metatables for basic types */
+ TString *strcache[STRCACHE_N][STRCACHE_M]; /* cache for strings in API */
+} global_State;
+
+
+/*
+** 'per thread' state
+*/
+struct lua_State {
+ CommonHeader;
+ unsigned short nci; /* number of items in 'ci' list */
+ lu_byte status;
+ StkId top; /* first free slot in the stack */
+ global_State *l_G;
+ CallInfo *ci; /* call info for current function */
+ const Instruction *oldpc; /* last pc traced */
+ StkId stack_last; /* last free slot in the stack */
+ StkId stack; /* stack base */
+ UpVal *openupval; /* list of open upvalues in this stack */
+ GCObject *gclist;
+ struct lua_State *twups; /* list of threads with open upvalues */
+ struct lua_longjmp *errorJmp; /* current error recover point */
+ CallInfo base_ci; /* CallInfo for first level (C calling Lua) */
+ volatile lua_Hook hook;
+ ptrdiff_t errfunc; /* current error handling function (stack index) */
+ int stacksize;
+ int basehookcount;
+ int hookcount;
+ unsigned short nny; /* number of non-yieldable calls in stack */
+ unsigned short nCcalls; /* number of nested C calls */
+ l_signalT hookmask;
+ lu_byte allowhook;
+};
+
+
+#define G(L) (L->l_G)
+
+
+/*
+** Union of all collectable objects (only for conversions)
+*/
+union GCUnion {
+ GCObject gc; /* common header */
+ struct TString ts;
+ struct Udata u;
+ union Closure cl;
+ struct Table h;
+ struct Proto p;
+ struct lua_State th; /* thread */
+};
+
+
+#define cast_u(o) cast(union GCUnion *, (o))
+
+/* macros to convert a GCObject into a specific value */
+#define gco2ts(o) \
+ check_exp(novariant((o)->tt) == LUA_TSTRING, &((cast_u(o))->ts))
+#define gco2u(o) check_exp((o)->tt == LUA_TUSERDATA, &((cast_u(o))->u))
+#define gco2lcl(o) check_exp((o)->tt == LUA_TLCL, &((cast_u(o))->cl.l))
+#define gco2ccl(o) check_exp((o)->tt == LUA_TCCL, &((cast_u(o))->cl.c))
+#define gco2cl(o) \
+ check_exp(novariant((o)->tt) == LUA_TFUNCTION, &((cast_u(o))->cl))
+#define gco2t(o) check_exp((o)->tt == LUA_TTABLE, &((cast_u(o))->h))
+#define gco2p(o) check_exp((o)->tt == LUA_TPROTO, &((cast_u(o))->p))
+#define gco2th(o) check_exp((o)->tt == LUA_TTHREAD, &((cast_u(o))->th))
+
+
+/* macro to convert a Lua object into a GCObject */
+#define obj2gco(v) \
+ check_exp(novariant((v)->tt) < LUA_TDEADKEY, (&(cast_u(v)->gc)))
+
+
+/* actual number of total bytes allocated */
+#define gettotalbytes(g) cast(lu_mem, (g)->totalbytes + (g)->GCdebt)
+
+LUAI_FUNC void luaE_setdebt (global_State *g, l_mem debt);
+LUAI_FUNC void luaE_freethread (lua_State *L, lua_State *L1);
+LUAI_FUNC CallInfo *luaE_extendCI (lua_State *L);
+LUAI_FUNC void luaE_freeCI (lua_State *L);
+LUAI_FUNC void luaE_shrinkCI (lua_State *L);
+
+
+#endif
+
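As a reading aid for the list invariants described at the top of this header, here is a small internal-style sketch that walks the 'allgc' chain through the CommonHeader 'next' link. It assumes the core headers are on the include path; the helper name is made up for illustration and is not part of this commit:

#include "lstate.h"

/* Illustrative only: count the collectable objects currently linked on
   the 'allgc' list of a global state. */
static int count_allgc (global_State *g) {
  int n = 0;
  GCObject *o;
  for (o = g->allgc; o != NULL; o = o->next)
    n++;
  return n;
}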
diff --git a/lua/src/lstring.c b/lua/src/lstring.c
new file mode 100644
index 000000000..9351766fd
--- /dev/null
+++ b/lua/src/lstring.c
@@ -0,0 +1,248 @@
+/*
+** $Id: lstring.c,v 2.56 2015/11/23 11:32:51 roberto Exp $
+** String table (keeps all strings handled by Lua)
+** See Copyright Notice in lua.h
+*/
+
+#define lstring_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <string.h>
+
+#include "lua.h"
+
+#include "ldebug.h"
+#include "ldo.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lstate.h"
+#include "lstring.h"
+
+
+#define MEMERRMSG "not enough memory"
+
+
+/*
+** Lua will use at most ~(2^LUAI_HASHLIMIT) bytes from a string to
+** compute its hash
+*/
+#if !defined(LUAI_HASHLIMIT)
+#define LUAI_HASHLIMIT 5
+#endif
+
+
+/*
+** equality for long strings
+*/
+int luaS_eqlngstr (TString *a, TString *b) {
+ size_t len = a->u.lnglen;
+ lua_assert(a->tt == LUA_TLNGSTR && b->tt == LUA_TLNGSTR);
+ return (a == b) || /* same instance or... */
+ ((len == b->u.lnglen) && /* equal length and ... */
+ (memcmp(getstr(a), getstr(b), len) == 0)); /* equal contents */
+}
+
+
+unsigned int luaS_hash (const char *str, size_t l, unsigned int seed) {
+ unsigned int h = seed ^ cast(unsigned int, l);
+ size_t step = (l >> LUAI_HASHLIMIT) + 1;
+ for (; l >= step; l -= step)
+ h ^= ((h<<5) + (h>>2) + cast_byte(str[l - 1]));
+ return h;
+}
+
+
+unsigned int luaS_hashlongstr (TString *ts) {
+ lua_assert(ts->tt == LUA_TLNGSTR);
+ if (ts->extra == 0) { /* no hash? */
+ ts->hash = luaS_hash(getstr(ts), ts->u.lnglen, ts->hash);
+ ts->extra = 1; /* now it has its hash */
+ }
+ return ts->hash;
+}
+
+
+/*
+** resizes the string table
+*/
+void luaS_resize (lua_State *L, int newsize) {
+ int i;
+ stringtable *tb = &G(L)->strt;
+ if (newsize > tb->size) { /* grow table if needed */
+ luaM_reallocvector(L, tb->hash, tb->size, newsize, TString *);
+ for (i = tb->size; i < newsize; i++)
+ tb->hash[i] = NULL;
+ }
+ for (i = 0; i < tb->size; i++) { /* rehash */
+ TString *p = tb->hash[i];
+ tb->hash[i] = NULL;
+ while (p) { /* for each node in the list */
+ TString *hnext = p->u.hnext; /* save next */
+ unsigned int h = lmod(p->hash, newsize); /* new position */
+ p->u.hnext = tb->hash[h]; /* chain it */
+ tb->hash[h] = p;
+ p = hnext;
+ }
+ }
+ if (newsize < tb->size) { /* shrink table if needed */
+ /* vanishing slice should be empty */
+ lua_assert(tb->hash[newsize] == NULL && tb->hash[tb->size - 1] == NULL);
+ luaM_reallocvector(L, tb->hash, tb->size, newsize, TString *);
+ }
+ tb->size = newsize;
+}
+
+
+/*
+** Clear API string cache. (Entries cannot be empty, so fill them with
+** a non-collectable string.)
+*/
+void luaS_clearcache (global_State *g) {
+ int i, j;
+ for (i = 0; i < STRCACHE_N; i++)
+ for (j = 0; j < STRCACHE_M; j++) {
+ if (iswhite(g->strcache[i][j])) /* will entry be collected? */
+ g->strcache[i][j] = g->memerrmsg; /* replace it with something fixed */
+ }
+}
+
+
+/*
+** Initialize the string table and the string cache
+*/
+void luaS_init (lua_State *L) {
+ global_State *g = G(L);
+ int i, j;
+ luaS_resize(L, MINSTRTABSIZE); /* initial size of string table */
+ /* pre-create memory-error message */
+ g->memerrmsg = luaS_newliteral(L, MEMERRMSG);
+ luaC_fix(L, obj2gco(g->memerrmsg)); /* it should never be collected */
+ for (i = 0; i < STRCACHE_N; i++) /* fill cache with valid strings */
+ for (j = 0; j < STRCACHE_M; j++)
+ g->strcache[i][j] = g->memerrmsg;
+}
+
+
+
+/*
+** creates a new string object
+*/
+static TString *createstrobj (lua_State *L, size_t l, int tag, unsigned int h) {
+ TString *ts;
+ GCObject *o;
+ size_t totalsize; /* total size of TString object */
+ totalsize = sizelstring(l);
+ o = luaC_newobj(L, tag, totalsize);
+ ts = gco2ts(o);
+ ts->hash = h;
+ ts->extra = 0;
+ getstr(ts)[l] = '\0'; /* ending 0 */
+ return ts;
+}
+
+
+TString *luaS_createlngstrobj (lua_State *L, size_t l) {
+ TString *ts = createstrobj(L, l, LUA_TLNGSTR, G(L)->seed);
+ ts->u.lnglen = l;
+ return ts;
+}
+
+
+void luaS_remove (lua_State *L, TString *ts) {
+ stringtable *tb = &G(L)->strt;
+ TString **p = &tb->hash[lmod(ts->hash, tb->size)];
+ while (*p != ts) /* find previous element */
+ p = &(*p)->u.hnext;
+ *p = (*p)->u.hnext; /* remove element from its list */
+ tb->nuse--;
+}
+
+
+/*
+** checks whether a short string already exists and reuses it, or creates a new one
+*/
+static TString *internshrstr (lua_State *L, const char *str, size_t l) {
+ TString *ts;
+ global_State *g = G(L);
+ unsigned int h = luaS_hash(str, l, g->seed);
+ TString **list = &g->strt.hash[lmod(h, g->strt.size)];
+ lua_assert(str != NULL); /* otherwise 'memcmp'/'memcpy' are undefined */
+ for (ts = *list; ts != NULL; ts = ts->u.hnext) {
+ if (l == ts->shrlen &&
+ (memcmp(str, getstr(ts), l * sizeof(char)) == 0)) {
+ /* found! */
+ if (isdead(g, ts)) /* dead (but not collected yet)? */
+ changewhite(ts); /* resurrect it */
+ return ts;
+ }
+ }
+ if (g->strt.nuse >= g->strt.size && g->strt.size <= MAX_INT/2) {
+ luaS_resize(L, g->strt.size * 2);
+ list = &g->strt.hash[lmod(h, g->strt.size)]; /* recompute with new size */
+ }
+ ts = createstrobj(L, l, LUA_TSHRSTR, h);
+ memcpy(getstr(ts), str, l * sizeof(char));
+ ts->shrlen = cast_byte(l);
+ ts->u.hnext = *list;
+ *list = ts;
+ g->strt.nuse++;
+ return ts;
+}
+
+
+/*
+** new string (with explicit length)
+*/
+TString *luaS_newlstr (lua_State *L, const char *str, size_t l) {
+ if (l <= LUAI_MAXSHORTLEN) /* short string? */
+ return internshrstr(L, str, l);
+ else {
+ TString *ts;
+ if (l >= (MAX_SIZE - sizeof(TString))/sizeof(char))
+ luaM_toobig(L);
+ ts = luaS_createlngstrobj(L, l);
+ memcpy(getstr(ts), str, l * sizeof(char));
+ return ts;
+ }
+}
+
+
+/*
+** Create or reuse a zero-terminated string, first checking in the
+** cache (using the string address as a key). The cache can contain
+** only zero-terminated strings, so it is safe to use 'strcmp' to
+** check hits.
+*/
+TString *luaS_new (lua_State *L, const char *str) {
+ unsigned int i = point2uint(str) % STRCACHE_N; /* hash */
+ int j;
+ TString **p = G(L)->strcache[i];
+ for (j = 0; j < STRCACHE_M; j++) {
+ if (strcmp(str, getstr(p[j])) == 0) /* hit? */
+ return p[j]; /* that is it */
+ }
+ /* normal route */
+ for (j = STRCACHE_M - 1; j > 0; j--)
+ p[j] = p[j - 1]; /* move out last element */
+ /* new element is first in the list */
+ p[0] = luaS_newlstr(L, str, strlen(str));
+ return p[0];
+}
+
+
+Udata *luaS_newudata (lua_State *L, size_t s) {
+ Udata *u;
+ GCObject *o;
+ if (s > MAX_SIZE - sizeof(Udata))
+ luaM_toobig(L);
+ o = luaC_newobj(L, LUA_TUSERDATA, sizeludata(s));
+ u = gco2u(o);
+ u->len = s;
+ u->metatable = NULL;
+ setuservalue(L, u, luaO_nilobject);
+ return u;
+}
+
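A stand-alone sketch of the sampling idea behind luaS_hash above, with LUAI_HASHLIMIT fixed at its default of 5. It mirrors the loop in the new file rather than replacing it, and the helper name is invented for illustration:

#include <stddef.h>

/* Hash at most about 2^5 characters: 'step' grows with the length, so
   very long strings are sampled instead of being scanned completely. */
static unsigned int sample_hash (const char *str, size_t l, unsigned int seed) {
  unsigned int h = seed ^ (unsigned int)l;
  size_t step = (l >> 5) + 1;          /* same as (l >> LUAI_HASHLIMIT) + 1 */
  for (; l >= step; l -= step)
    h ^= ((h << 5) + (h >> 2) + (unsigned char)str[l - 1]);
  return h;
}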
diff --git a/lua/src/lstring.h b/lua/src/lstring.h
new file mode 100644
index 000000000..27efd2077
--- /dev/null
+++ b/lua/src/lstring.h
@@ -0,0 +1,49 @@
+/*
+** $Id: lstring.h,v 1.61 2015/11/03 15:36:01 roberto Exp $
+** String table (keep all strings handled by Lua)
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lstring_h
+#define lstring_h
+
+#include "lgc.h"
+#include "lobject.h"
+#include "lstate.h"
+
+
+#define sizelstring(l) (sizeof(union UTString) + ((l) + 1) * sizeof(char))
+
+#define sizeludata(l) (sizeof(union UUdata) + (l))
+#define sizeudata(u) sizeludata((u)->len)
+
+#define luaS_newliteral(L, s) (luaS_newlstr(L, "" s, \
+ (sizeof(s)/sizeof(char))-1))
+
+
+/*
+** test whether a string is a reserved word
+*/
+#define isreserved(s) ((s)->tt == LUA_TSHRSTR && (s)->extra > 0)
+
+
+/*
+** equality for short strings, which are always internalized
+*/
+#define eqshrstr(a,b) check_exp((a)->tt == LUA_TSHRSTR, (a) == (b))
+
+
+LUAI_FUNC unsigned int luaS_hash (const char *str, size_t l, unsigned int seed);
+LUAI_FUNC unsigned int luaS_hashlongstr (TString *ts);
+LUAI_FUNC int luaS_eqlngstr (TString *a, TString *b);
+LUAI_FUNC void luaS_resize (lua_State *L, int newsize);
+LUAI_FUNC void luaS_clearcache (global_State *g);
+LUAI_FUNC void luaS_init (lua_State *L);
+LUAI_FUNC void luaS_remove (lua_State *L, TString *ts);
+LUAI_FUNC Udata *luaS_newudata (lua_State *L, size_t s);
+LUAI_FUNC TString *luaS_newlstr (lua_State *L, const char *str, size_t l);
+LUAI_FUNC TString *luaS_new (lua_State *L, const char *str);
+LUAI_FUNC TString *luaS_createlngstrobj (lua_State *L, size_t l);
+
+
+#endif
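For a feel of the allocation sizes the macros above compute, a small sketch (assuming the Lua src directory is on the include path; the exact numbers depend on struct padding on the target platform, and the helper name is illustrative):

#include <stdio.h>
#include "lstring.h"

static void print_string_sizes (void) {
  /* header plus characters plus the terminating '\0' */
  printf("TString holding 10 chars: %u bytes\n", (unsigned)sizelstring(10));
  /* header plus the user-supplied block */
  printf("Udata holding 64 bytes:   %u bytes\n", (unsigned)sizeludata(64));
}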
diff --git a/lua/src/lstrlib.c b/lua/src/lstrlib.c
new file mode 100644
index 000000000..c7aa755fa
--- /dev/null
+++ b/lua/src/lstrlib.c
@@ -0,0 +1,1584 @@
+/*
+** $Id: lstrlib.c,v 1.254 2016/12/22 13:08:50 roberto Exp $
+** Standard library for string operations and pattern-matching
+** See Copyright Notice in lua.h
+*/
+
+#define lstrlib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <ctype.h>
+#include <float.h>
+#include <limits.h>
+#include <locale.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+/*
+** maximum number of captures that a pattern can do during
+** pattern-matching. This limit is arbitrary, but must fit in
+** an unsigned char.
+*/
+#if !defined(LUA_MAXCAPTURES)
+#define LUA_MAXCAPTURES 32
+#endif
+
+
+/* macro to 'unsign' a character */
+#define uchar(c) ((unsigned char)(c))
+
+
+/*
+** Some sizes are better limited to fit in 'int', but must also fit in
+** 'size_t'. (We assume that 'lua_Integer' cannot be smaller than 'int'.)
+*/
+#define MAX_SIZET ((size_t)(~(size_t)0))
+
+#define MAXSIZE \
+ (sizeof(size_t) < sizeof(int) ? MAX_SIZET : (size_t)(INT_MAX))
+
+
+
+
+static int str_len (lua_State *L) {
+ size_t l;
+ luaL_checklstring(L, 1, &l);
+ lua_pushinteger(L, (lua_Integer)l);
+ return 1;
+}
+
+
+/* translate a relative string position: negative means back from end */
+static lua_Integer posrelat (lua_Integer pos, size_t len) {
+ if (pos >= 0) return pos;
+ else if (0u - (size_t)pos > len) return 0;
+ else return (lua_Integer)len + pos + 1;
+}
+
+
+static int str_sub (lua_State *L) {
+ size_t l;
+ const char *s = luaL_checklstring(L, 1, &l);
+ lua_Integer start = posrelat(luaL_checkinteger(L, 2), l);
+ lua_Integer end = posrelat(luaL_optinteger(L, 3, -1), l);
+ if (start < 1) start = 1;
+ if (end > (lua_Integer)l) end = l;
+ if (start <= end)
+ lua_pushlstring(L, s + start - 1, (size_t)(end - start) + 1);
+ else lua_pushliteral(L, "");
+ return 1;
+}
+
+
+static int str_reverse (lua_State *L) {
+ size_t l, i;
+ luaL_Buffer b;
+ const char *s = luaL_checklstring(L, 1, &l);
+ char *p = luaL_buffinitsize(L, &b, l);
+ for (i = 0; i < l; i++)
+ p[i] = s[l - i - 1];
+ luaL_pushresultsize(&b, l);
+ return 1;
+}
+
+
+static int str_lower (lua_State *L) {
+ size_t l;
+ size_t i;
+ luaL_Buffer b;
+ const char *s = luaL_checklstring(L, 1, &l);
+ char *p = luaL_buffinitsize(L, &b, l);
+ for (i=0; i<l; i++)
+ p[i] = tolower(uchar(s[i]));
+ luaL_pushresultsize(&b, l);
+ return 1;
+}
+
+
+static int str_upper (lua_State *L) {
+ size_t l;
+ size_t i;
+ luaL_Buffer b;
+ const char *s = luaL_checklstring(L, 1, &l);
+ char *p = luaL_buffinitsize(L, &b, l);
+ for (i=0; i<l; i++)
+ p[i] = toupper(uchar(s[i]));
+ luaL_pushresultsize(&b, l);
+ return 1;
+}
+
+
+static int str_rep (lua_State *L) {
+ size_t l, lsep;
+ const char *s = luaL_checklstring(L, 1, &l);
+ lua_Integer n = luaL_checkinteger(L, 2);
+ const char *sep = luaL_optlstring(L, 3, "", &lsep);
+ if (n <= 0) lua_pushliteral(L, "");
+ else if (l + lsep < l || l + lsep > MAXSIZE / n) /* may overflow? */
+ return luaL_error(L, "resulting string too large");
+ else {
+ size_t totallen = (size_t)n * l + (size_t)(n - 1) * lsep;
+ luaL_Buffer b;
+ char *p = luaL_buffinitsize(L, &b, totallen);
+ while (n-- > 1) { /* first n-1 copies (followed by separator) */
+ memcpy(p, s, l * sizeof(char)); p += l;
+ if (lsep > 0) { /* empty 'memcpy' is not that cheap */
+ memcpy(p, sep, lsep * sizeof(char));
+ p += lsep;
+ }
+ }
+ memcpy(p, s, l * sizeof(char)); /* last copy (not followed by separator) */
+ luaL_pushresultsize(&b, totallen);
+ }
+ return 1;
+}
+
+
+static int str_byte (lua_State *L) {
+ size_t l;
+ const char *s = luaL_checklstring(L, 1, &l);
+ lua_Integer posi = posrelat(luaL_optinteger(L, 2, 1), l);
+ lua_Integer pose = posrelat(luaL_optinteger(L, 3, posi), l);
+ int n, i;
+ if (posi < 1) posi = 1;
+ if (pose > (lua_Integer)l) pose = l;
+ if (posi > pose) return 0; /* empty interval; return no values */
+ if (pose - posi >= INT_MAX) /* arithmetic overflow? */
+ return luaL_error(L, "string slice too long");
+ n = (int)(pose - posi) + 1;
+ luaL_checkstack(L, n, "string slice too long");
+ for (i=0; i<n; i++)
+ lua_pushinteger(L, uchar(s[posi+i-1]));
+ return n;
+}
+
+
+static int str_char (lua_State *L) {
+ int n = lua_gettop(L); /* number of arguments */
+ int i;
+ luaL_Buffer b;
+ char *p = luaL_buffinitsize(L, &b, n);
+ for (i=1; i<=n; i++) {
+ lua_Integer c = luaL_checkinteger(L, i);
+ luaL_argcheck(L, uchar(c) == c, i, "value out of range");
+ p[i - 1] = uchar(c);
+ }
+ luaL_pushresultsize(&b, n);
+ return 1;
+}
+
+
+static int writer (lua_State *L, const void *b, size_t size, void *B) {
+ (void)L;
+ luaL_addlstring((luaL_Buffer *) B, (const char *)b, size);
+ return 0;
+}
+
+
+static int str_dump (lua_State *L) {
+ luaL_Buffer b;
+ int strip = lua_toboolean(L, 2);
+ luaL_checktype(L, 1, LUA_TFUNCTION);
+ lua_settop(L, 1);
+ luaL_buffinit(L,&b);
+ if (lua_dump(L, writer, &b, strip) != 0)
+ return luaL_error(L, "unable to dump given function");
+ luaL_pushresult(&b);
+ return 1;
+}
+
+
+
+/*
+** {======================================================
+** PATTERN MATCHING
+** =======================================================
+*/
+
+
+#define CAP_UNFINISHED (-1)
+#define CAP_POSITION (-2)
+
+
+typedef struct MatchState {
+ const char *src_init; /* init of source string */
+ const char *src_end; /* end ('\0') of source string */
+ const char *p_end; /* end ('\0') of pattern */
+ lua_State *L;
+ int matchdepth; /* control for recursive depth (to avoid C stack overflow) */
+ unsigned char level; /* total number of captures (finished or unfinished) */
+ struct {
+ const char *init;
+ ptrdiff_t len;
+ } capture[LUA_MAXCAPTURES];
+} MatchState;
+
+
+/* recursive function */
+static const char *match (MatchState *ms, const char *s, const char *p);
+
+
+/* maximum recursion depth for 'match' */
+#if !defined(MAXCCALLS)
+#define MAXCCALLS 200
+#endif
+
+
+#define L_ESC '%'
+#define SPECIALS "^$*+?.([%-"
+
+
+static int check_capture (MatchState *ms, int l) {
+ l -= '1';
+ if (l < 0 || l >= ms->level || ms->capture[l].len == CAP_UNFINISHED)
+ return luaL_error(ms->L, "invalid capture index %%%d", l + 1);
+ return l;
+}
+
+
+static int capture_to_close (MatchState *ms) {
+ int level = ms->level;
+ for (level--; level>=0; level--)
+ if (ms->capture[level].len == CAP_UNFINISHED) return level;
+ return luaL_error(ms->L, "invalid pattern capture");
+}
+
+
+static const char *classend (MatchState *ms, const char *p) {
+ switch (*p++) {
+ case L_ESC: {
+ if (p == ms->p_end)
+ luaL_error(ms->L, "malformed pattern (ends with '%%')");
+ return p+1;
+ }
+ case '[': {
+ if (*p == '^') p++;
+ do { /* look for a ']' */
+ if (p == ms->p_end)
+ luaL_error(ms->L, "malformed pattern (missing ']')");
+ if (*(p++) == L_ESC && p < ms->p_end)
+ p++; /* skip escapes (e.g. '%]') */
+ } while (*p != ']');
+ return p+1;
+ }
+ default: {
+ return p;
+ }
+ }
+}
+
+
+static int match_class (int c, int cl) {
+ int res;
+ switch (tolower(cl)) {
+ case 'a' : res = isalpha(c); break;
+ case 'c' : res = iscntrl(c); break;
+ case 'd' : res = isdigit(c); break;
+ case 'g' : res = isgraph(c); break;
+ case 'l' : res = islower(c); break;
+ case 'p' : res = ispunct(c); break;
+ case 's' : res = isspace(c); break;
+ case 'u' : res = isupper(c); break;
+ case 'w' : res = isalnum(c); break;
+ case 'x' : res = isxdigit(c); break;
+ case 'z' : res = (c == 0); break; /* deprecated option */
+ default: return (cl == c);
+ }
+ return (islower(cl) ? res : !res);
+}
+
+
+static int matchbracketclass (int c, const char *p, const char *ec) {
+ int sig = 1;
+ if (*(p+1) == '^') {
+ sig = 0;
+ p++; /* skip the '^' */
+ }
+ while (++p < ec) {
+ if (*p == L_ESC) {
+ p++;
+ if (match_class(c, uchar(*p)))
+ return sig;
+ }
+ else if ((*(p+1) == '-') && (p+2 < ec)) {
+ p+=2;
+ if (uchar(*(p-2)) <= c && c <= uchar(*p))
+ return sig;
+ }
+ else if (uchar(*p) == c) return sig;
+ }
+ return !sig;
+}
+
+
+static int singlematch (MatchState *ms, const char *s, const char *p,
+ const char *ep) {
+ if (s >= ms->src_end)
+ return 0;
+ else {
+ int c = uchar(*s);
+ switch (*p) {
+ case '.': return 1; /* matches any char */
+ case L_ESC: return match_class(c, uchar(*(p+1)));
+ case '[': return matchbracketclass(c, p, ep-1);
+ default: return (uchar(*p) == c);
+ }
+ }
+}
+
+
+static const char *matchbalance (MatchState *ms, const char *s,
+ const char *p) {
+ if (p >= ms->p_end - 1)
+ luaL_error(ms->L, "malformed pattern (missing arguments to '%%b')");
+ if (*s != *p) return NULL;
+ else {
+ int b = *p;
+ int e = *(p+1);
+ int cont = 1;
+ while (++s < ms->src_end) {
+ if (*s == e) {
+ if (--cont == 0) return s+1;
+ }
+ else if (*s == b) cont++;
+ }
+ }
+ return NULL; /* string ends out of balance */
+}
+
+
+static const char *max_expand (MatchState *ms, const char *s,
+ const char *p, const char *ep) {
+ ptrdiff_t i = 0; /* counts maximum expand for item */
+ while (singlematch(ms, s + i, p, ep))
+ i++;
+ /* keeps trying to match with the maximum repetitions */
+ while (i>=0) {
+ const char *res = match(ms, (s+i), ep+1);
+ if (res) return res;
+ i--; /* else didn't match; reduce 1 repetition to try again */
+ }
+ return NULL;
+}
+
+
+static const char *min_expand (MatchState *ms, const char *s,
+ const char *p, const char *ep) {
+ for (;;) {
+ const char *res = match(ms, s, ep+1);
+ if (res != NULL)
+ return res;
+ else if (singlematch(ms, s, p, ep))
+ s++; /* try with one more repetition */
+ else return NULL;
+ }
+}
+
+
+static const char *start_capture (MatchState *ms, const char *s,
+ const char *p, int what) {
+ const char *res;
+ int level = ms->level;
+ if (level >= LUA_MAXCAPTURES) luaL_error(ms->L, "too many captures");
+ ms->capture[level].init = s;
+ ms->capture[level].len = what;
+ ms->level = level+1;
+ if ((res=match(ms, s, p)) == NULL) /* match failed? */
+ ms->level--; /* undo capture */
+ return res;
+}
+
+
+static const char *end_capture (MatchState *ms, const char *s,
+ const char *p) {
+ int l = capture_to_close(ms);
+ const char *res;
+ ms->capture[l].len = s - ms->capture[l].init; /* close capture */
+ if ((res = match(ms, s, p)) == NULL) /* match failed? */
+ ms->capture[l].len = CAP_UNFINISHED; /* undo capture */
+ return res;
+}
+
+
+static const char *match_capture (MatchState *ms, const char *s, int l) {
+ size_t len;
+ l = check_capture(ms, l);
+ len = ms->capture[l].len;
+ if ((size_t)(ms->src_end-s) >= len &&
+ memcmp(ms->capture[l].init, s, len) == 0)
+ return s+len;
+ else return NULL;
+}
+
+
+static const char *match (MatchState *ms, const char *s, const char *p) {
+ if (ms->matchdepth-- == 0)
+ luaL_error(ms->L, "pattern too complex");
+ init: /* using goto's to optimize tail recursion */
+ if (p != ms->p_end) { /* end of pattern? */
+ switch (*p) {
+ case '(': { /* start capture */
+ if (*(p + 1) == ')') /* position capture? */
+ s = start_capture(ms, s, p + 2, CAP_POSITION);
+ else
+ s = start_capture(ms, s, p + 1, CAP_UNFINISHED);
+ break;
+ }
+ case ')': { /* end capture */
+ s = end_capture(ms, s, p + 1);
+ break;
+ }
+ case '$': {
+ if ((p + 1) != ms->p_end) /* is the '$' the last char in pattern? */
+ goto dflt; /* no; go to default */
+ s = (s == ms->src_end) ? s : NULL; /* check end of string */
+ break;
+ }
+ case L_ESC: { /* escaped sequences not in the format class[*+?-]? */
+ switch (*(p + 1)) {
+ case 'b': { /* balanced string? */
+ s = matchbalance(ms, s, p + 2);
+ if (s != NULL) {
+ p += 4; goto init; /* return match(ms, s, p + 4); */
+ } /* else fail (s == NULL) */
+ break;
+ }
+ case 'f': { /* frontier? */
+ const char *ep; char previous;
+ p += 2;
+ if (*p != '[')
+ luaL_error(ms->L, "missing '[' after '%%f' in pattern");
+ ep = classend(ms, p); /* points to what is next */
+ previous = (s == ms->src_init) ? '\0' : *(s - 1);
+ if (!matchbracketclass(uchar(previous), p, ep - 1) &&
+ matchbracketclass(uchar(*s), p, ep - 1)) {
+ p = ep; goto init; /* return match(ms, s, ep); */
+ }
+ s = NULL; /* match failed */
+ break;
+ }
+ case '0': case '1': case '2': case '3':
+ case '4': case '5': case '6': case '7':
+ case '8': case '9': { /* capture results (%0-%9)? */
+ s = match_capture(ms, s, uchar(*(p + 1)));
+ if (s != NULL) {
+ p += 2; goto init; /* return match(ms, s, p + 2) */
+ }
+ break;
+ }
+ default: goto dflt;
+ }
+ break;
+ }
+ default: dflt: { /* pattern class plus optional suffix */
+ const char *ep = classend(ms, p); /* points to optional suffix */
+ /* does not match at least once? */
+ if (!singlematch(ms, s, p, ep)) {
+ if (*ep == '*' || *ep == '?' || *ep == '-') { /* accept empty? */
+ p = ep + 1; goto init; /* return match(ms, s, ep + 1); */
+ }
+ else /* '+' or no suffix */
+ s = NULL; /* fail */
+ }
+ else { /* matched once */
+ switch (*ep) { /* handle optional suffix */
+ case '?': { /* optional */
+ const char *res;
+ if ((res = match(ms, s + 1, ep + 1)) != NULL)
+ s = res;
+ else {
+ p = ep + 1; goto init; /* else return match(ms, s, ep + 1); */
+ }
+ break;
+ }
+ case '+': /* 1 or more repetitions */
+ s++; /* 1 match already done */
+ /* FALLTHROUGH */
+ case '*': /* 0 or more repetitions */
+ s = max_expand(ms, s, p, ep);
+ break;
+ case '-': /* 0 or more repetitions (minimum) */
+ s = min_expand(ms, s, p, ep);
+ break;
+ default: /* no suffix */
+ s++; p = ep; goto init; /* return match(ms, s + 1, ep); */
+ }
+ }
+ break;
+ }
+ }
+ }
+ ms->matchdepth++;
+ return s;
+}
+
+
+
+static const char *lmemfind (const char *s1, size_t l1,
+ const char *s2, size_t l2) {
+ if (l2 == 0) return s1; /* empty strings are everywhere */
+ else if (l2 > l1) return NULL; /* avoids a negative 'l1' */
+ else {
+ const char *init; /* to search for a '*s2' inside 's1' */
+ l2--; /* 1st char will be checked by 'memchr' */
+ l1 = l1-l2; /* 's2' cannot be found after that */
+ while (l1 > 0 && (init = (const char *)memchr(s1, *s2, l1)) != NULL) {
+ init++; /* 1st char is already checked */
+ if (memcmp(init, s2+1, l2) == 0)
+ return init-1;
+ else { /* correct 'l1' and 's1' to try again */
+ l1 -= init-s1;
+ s1 = init;
+ }
+ }
+ return NULL; /* not found */
+ }
+}
+
+
+static void push_onecapture (MatchState *ms, int i, const char *s,
+ const char *e) {
+ if (i >= ms->level) {
+ if (i == 0) /* ms->level == 0, too */
+ lua_pushlstring(ms->L, s, e - s); /* add whole match */
+ else
+ luaL_error(ms->L, "invalid capture index %%%d", i + 1);
+ }
+ else {
+ ptrdiff_t l = ms->capture[i].len;
+ if (l == CAP_UNFINISHED) luaL_error(ms->L, "unfinished capture");
+ if (l == CAP_POSITION)
+ lua_pushinteger(ms->L, (ms->capture[i].init - ms->src_init) + 1);
+ else
+ lua_pushlstring(ms->L, ms->capture[i].init, l);
+ }
+}
+
+
+static int push_captures (MatchState *ms, const char *s, const char *e) {
+ int i;
+ int nlevels = (ms->level == 0 && s) ? 1 : ms->level;
+ luaL_checkstack(ms->L, nlevels, "too many captures");
+ for (i = 0; i < nlevels; i++)
+ push_onecapture(ms, i, s, e);
+ return nlevels; /* number of strings pushed */
+}
+
+
+/* check whether pattern has no special characters */
+static int nospecials (const char *p, size_t l) {
+ size_t upto = 0;
+ do {
+ if (strpbrk(p + upto, SPECIALS))
+ return 0; /* pattern has a special character */
+ upto += strlen(p + upto) + 1; /* may have more after \0 */
+ } while (upto <= l);
+ return 1; /* no special chars found */
+}
+
+
+static void prepstate (MatchState *ms, lua_State *L,
+ const char *s, size_t ls, const char *p, size_t lp) {
+ ms->L = L;
+ ms->matchdepth = MAXCCALLS;
+ ms->src_init = s;
+ ms->src_end = s + ls;
+ ms->p_end = p + lp;
+}
+
+
+static void reprepstate (MatchState *ms) {
+ ms->level = 0;
+ lua_assert(ms->matchdepth == MAXCCALLS);
+}
+
+
+static int str_find_aux (lua_State *L, int find) {
+ size_t ls, lp;
+ const char *s = luaL_checklstring(L, 1, &ls);
+ const char *p = luaL_checklstring(L, 2, &lp);
+ lua_Integer init = posrelat(luaL_optinteger(L, 3, 1), ls);
+ if (init < 1) init = 1;
+ else if (init > (lua_Integer)ls + 1) { /* start after string's end? */
+ lua_pushnil(L); /* cannot find anything */
+ return 1;
+ }
+ /* explicit request or no special characters? */
+ if (find && (lua_toboolean(L, 4) || nospecials(p, lp))) {
+ /* do a plain search */
+ const char *s2 = lmemfind(s + init - 1, ls - (size_t)init + 1, p, lp);
+ if (s2) {
+ lua_pushinteger(L, (s2 - s) + 1);
+ lua_pushinteger(L, (s2 - s) + lp);
+ return 2;
+ }
+ }
+ else {
+ MatchState ms;
+ const char *s1 = s + init - 1;
+ int anchor = (*p == '^');
+ if (anchor) {
+ p++; lp--; /* skip anchor character */
+ }
+ prepstate(&ms, L, s, ls, p, lp);
+ do {
+ const char *res;
+ reprepstate(&ms);
+ if ((res=match(&ms, s1, p)) != NULL) {
+ if (find) {
+ lua_pushinteger(L, (s1 - s) + 1); /* start */
+ lua_pushinteger(L, res - s); /* end */
+ return push_captures(&ms, NULL, 0) + 2;
+ }
+ else
+ return push_captures(&ms, s1, res);
+ }
+ } while (s1++ < ms.src_end && !anchor);
+ }
+ lua_pushnil(L); /* not found */
+ return 1;
+}
+
+
+static int str_find (lua_State *L) {
+ return str_find_aux(L, 1);
+}
+
+
+static int str_match (lua_State *L) {
+ return str_find_aux(L, 0);
+}
+
+
+/* state for 'gmatch' */
+typedef struct GMatchState {
+ const char *src; /* current position */
+ const char *p; /* pattern */
+ const char *lastmatch; /* end of last match */
+ MatchState ms; /* match state */
+} GMatchState;
+
+
+static int gmatch_aux (lua_State *L) {
+ GMatchState *gm = (GMatchState *)lua_touserdata(L, lua_upvalueindex(3));
+ const char *src;
+ gm->ms.L = L;
+ for (src = gm->src; src <= gm->ms.src_end; src++) {
+ const char *e;
+ reprepstate(&gm->ms);
+ if ((e = match(&gm->ms, src, gm->p)) != NULL && e != gm->lastmatch) {
+ gm->src = gm->lastmatch = e;
+ return push_captures(&gm->ms, src, e);
+ }
+ }
+ return 0; /* not found */
+}
+
+
+static int gmatch (lua_State *L) {
+ size_t ls, lp;
+ const char *s = luaL_checklstring(L, 1, &ls);
+ const char *p = luaL_checklstring(L, 2, &lp);
+ GMatchState *gm;
+ lua_settop(L, 2); /* keep them on closure to avoid being collected */
+ gm = (GMatchState *)lua_newuserdata(L, sizeof(GMatchState));
+ prepstate(&gm->ms, L, s, ls, p, lp);
+ gm->src = s; gm->p = p; gm->lastmatch = NULL;
+ lua_pushcclosure(L, gmatch_aux, 3);
+ return 1;
+}
+
+
+static void add_s (MatchState *ms, luaL_Buffer *b, const char *s,
+ const char *e) {
+ size_t l, i;
+ lua_State *L = ms->L;
+ const char *news = lua_tolstring(L, 3, &l);
+ for (i = 0; i < l; i++) {
+ if (news[i] != L_ESC)
+ luaL_addchar(b, news[i]);
+ else {
+ i++; /* skip ESC */
+ if (!isdigit(uchar(news[i]))) {
+ if (news[i] != L_ESC)
+ luaL_error(L, "invalid use of '%c' in replacement string", L_ESC);
+ luaL_addchar(b, news[i]);
+ }
+ else if (news[i] == '0')
+ luaL_addlstring(b, s, e - s);
+ else {
+ push_onecapture(ms, news[i] - '1', s, e);
+ luaL_tolstring(L, -1, NULL); /* if number, convert it to string */
+ lua_remove(L, -2); /* remove original value */
+ luaL_addvalue(b); /* add capture to accumulated result */
+ }
+ }
+ }
+}
+
+
+static void add_value (MatchState *ms, luaL_Buffer *b, const char *s,
+ const char *e, int tr) {
+ lua_State *L = ms->L;
+ switch (tr) {
+ case LUA_TFUNCTION: {
+ int n;
+ lua_pushvalue(L, 3);
+ n = push_captures(ms, s, e);
+ lua_call(L, n, 1);
+ break;
+ }
+ case LUA_TTABLE: {
+ push_onecapture(ms, 0, s, e);
+ lua_gettable(L, 3);
+ break;
+ }
+ default: { /* LUA_TNUMBER or LUA_TSTRING */
+ add_s(ms, b, s, e);
+ return;
+ }
+ }
+ if (!lua_toboolean(L, -1)) { /* nil or false? */
+ lua_pop(L, 1);
+ lua_pushlstring(L, s, e - s); /* keep original text */
+ }
+ else if (!lua_isstring(L, -1))
+ luaL_error(L, "invalid replacement value (a %s)", luaL_typename(L, -1));
+ luaL_addvalue(b); /* add result to accumulator */
+}
+
+
+static int str_gsub (lua_State *L) {
+ size_t srcl, lp;
+ const char *src = luaL_checklstring(L, 1, &srcl); /* subject */
+ const char *p = luaL_checklstring(L, 2, &lp); /* pattern */
+ const char *lastmatch = NULL; /* end of last match */
+ int tr = lua_type(L, 3); /* replacement type */
+ lua_Integer max_s = luaL_optinteger(L, 4, srcl + 1); /* max replacements */
+ int anchor = (*p == '^');
+ lua_Integer n = 0; /* replacement count */
+ MatchState ms;
+ luaL_Buffer b;
+ luaL_argcheck(L, tr == LUA_TNUMBER || tr == LUA_TSTRING ||
+ tr == LUA_TFUNCTION || tr == LUA_TTABLE, 3,
+ "string/function/table expected");
+ luaL_buffinit(L, &b);
+ if (anchor) {
+ p++; lp--; /* skip anchor character */
+ }
+ prepstate(&ms, L, src, srcl, p, lp);
+ while (n < max_s) {
+ const char *e;
+ reprepstate(&ms); /* (re)prepare state for new match */
+ if ((e = match(&ms, src, p)) != NULL && e != lastmatch) { /* match? */
+ n++;
+ add_value(&ms, &b, src, e, tr); /* add replacement to buffer */
+ src = lastmatch = e;
+ }
+ else if (src < ms.src_end) /* otherwise, skip one character */
+ luaL_addchar(&b, *src++);
+ else break; /* end of subject */
+ if (anchor) break;
+ }
+ luaL_addlstring(&b, src, ms.src_end-src);
+ luaL_pushresult(&b);
+ lua_pushinteger(L, n); /* number of substitutions */
+ return 2;
+}
+
+/* }====================================================== */
+
+
+
+/*
+** {======================================================
+** STRING FORMAT
+** =======================================================
+*/
+
+#if !defined(lua_number2strx) /* { */
+
+/*
+** Hexadecimal floating-point formatter
+*/
+
+#include <math.h>
+
+#define SIZELENMOD (sizeof(LUA_NUMBER_FRMLEN)/sizeof(char))
+
+
+/*
+** Number of bits that goes into the first digit. It can be any value
+** between 1 and 4; the following definition tries to align the number
+** to nibble boundaries by making what is left after that first digit a
+** multiple of 4.
+*/
+#define L_NBFD ((l_mathlim(MANT_DIG) - 1)%4 + 1)
+
+
+/*
+** Add integer part of 'x' to buffer and return new 'x'
+*/
+static lua_Number adddigit (char *buff, int n, lua_Number x) {
+ lua_Number dd = l_mathop(floor)(x); /* get integer part from 'x' */
+ int d = (int)dd;
+ buff[n] = (d < 10 ? d + '0' : d - 10 + 'a'); /* add to buffer */
+ return x - dd; /* return what is left */
+}
+
+
+static int num2straux (char *buff, int sz, lua_Number x) {
+ /* if 'inf' or 'NaN', format it like '%g' */
+ if (x != x || x == (lua_Number)HUGE_VAL || x == -(lua_Number)HUGE_VAL)
+ return l_sprintf(buff, sz, LUA_NUMBER_FMT, (LUAI_UACNUMBER)x);
+ else if (x == 0) { /* can be -0... */
+ /* create "0" or "-0" followed by exponent */
+ return l_sprintf(buff, sz, LUA_NUMBER_FMT "x0p+0", (LUAI_UACNUMBER)x);
+ }
+ else {
+ int e;
+ lua_Number m = l_mathop(frexp)(x, &e); /* 'x' fraction and exponent */
+ int n = 0; /* character count */
+ if (m < 0) { /* is number negative? */
+ buff[n++] = '-'; /* add sign */
+ m = -m; /* make it positive */
+ }
+ buff[n++] = '0'; buff[n++] = 'x'; /* add "0x" */
+ m = adddigit(buff, n++, m * (1 << L_NBFD)); /* add first digit */
+ e -= L_NBFD; /* this digit goes before the radix point */
+ if (m > 0) { /* more digits? */
+ buff[n++] = lua_getlocaledecpoint(); /* add radix point */
+ do { /* add as many digits as needed */
+ m = adddigit(buff, n++, m * 16);
+ } while (m > 0);
+ }
+ n += l_sprintf(buff + n, sz - n, "p%+d", e); /* add exponent */
+ lua_assert(n < sz);
+ return n;
+ }
+}
+
+
+static int lua_number2strx (lua_State *L, char *buff, int sz,
+ const char *fmt, lua_Number x) {
+ int n = num2straux(buff, sz, x);
+ if (fmt[SIZELENMOD] == 'A') {
+ int i;
+ for (i = 0; i < n; i++)
+ buff[i] = toupper(uchar(buff[i]));
+ }
+ else if (fmt[SIZELENMOD] != 'a')
+ luaL_error(L, "modifiers for format '%%a'/'%%A' not implemented");
+ return n;
+}
+
+#endif /* } */
+
+
+/*
+** Maximum size of each formatted item. This maximum size is produced
+** by format('%.99f', -maxfloat), and is equal to 99 + 3 ('-', '.',
+** and '\0') + number of decimal digits to represent maxfloat (which
+** is maximum exponent + 1). (99+3+1 then rounded to 120 for "extra
+** expenses", such as locale-dependent stuff)
+*/
+#define MAX_ITEM (120 + l_mathlim(MAX_10_EXP))
+
+
+/* valid flags in a format specification */
+#define FLAGS "-+ #0"
+
+/*
+** maximum size of each format specification (such as "%-099.99d")
+*/
+#define MAX_FORMAT 32
+
+
+static void addquoted (luaL_Buffer *b, const char *s, size_t len) {
+ luaL_addchar(b, '"');
+ while (len--) {
+ if (*s == '"' || *s == '\\' || *s == '\n') {
+ luaL_addchar(b, '\\');
+ luaL_addchar(b, *s);
+ }
+ else if (iscntrl(uchar(*s))) {
+ char buff[10];
+ if (!isdigit(uchar(*(s+1))))
+ l_sprintf(buff, sizeof(buff), "\\%d", (int)uchar(*s));
+ else
+ l_sprintf(buff, sizeof(buff), "\\%03d", (int)uchar(*s));
+ luaL_addstring(b, buff);
+ }
+ else
+ luaL_addchar(b, *s);
+ s++;
+ }
+ luaL_addchar(b, '"');
+}
+
+
+/*
+** Ensures the 'buff' string uses a dot as the radix character.
+*/
+static void checkdp (char *buff, int nb) {
+ if (memchr(buff, '.', nb) == NULL) { /* no dot? */
+ char point = lua_getlocaledecpoint(); /* try locale point */
+ char *ppoint = (char *)memchr(buff, point, nb);
+ if (ppoint) *ppoint = '.'; /* change it to a dot */
+ }
+}
+
+
+static void addliteral (lua_State *L, luaL_Buffer *b, int arg) {
+ switch (lua_type(L, arg)) {
+ case LUA_TSTRING: {
+ size_t len;
+ const char *s = lua_tolstring(L, arg, &len);
+ addquoted(b, s, len);
+ break;
+ }
+ case LUA_TNUMBER: {
+ char *buff = luaL_prepbuffsize(b, MAX_ITEM);
+ int nb;
+ if (!lua_isinteger(L, arg)) { /* float? */
+ lua_Number n = lua_tonumber(L, arg); /* write as hexa ('%a') */
+ nb = lua_number2strx(L, buff, MAX_ITEM, "%" LUA_NUMBER_FRMLEN "a", n);
+ checkdp(buff, nb); /* ensure it uses a dot */
+ }
+ else { /* integers */
+ lua_Integer n = lua_tointeger(L, arg);
+ const char *format = (n == LUA_MININTEGER) /* corner case? */
+ ? "0x%" LUA_INTEGER_FRMLEN "x" /* use hexa */
+ : LUA_INTEGER_FMT; /* else use default format */
+ nb = l_sprintf(buff, MAX_ITEM, format, (LUAI_UACINT)n);
+ }
+ luaL_addsize(b, nb);
+ break;
+ }
+ case LUA_TNIL: case LUA_TBOOLEAN: {
+ luaL_tolstring(L, arg, NULL);
+ luaL_addvalue(b);
+ break;
+ }
+ default: {
+ luaL_argerror(L, arg, "value has no literal form");
+ }
+ }
+}
+
+
+static const char *scanformat (lua_State *L, const char *strfrmt, char *form) {
+ const char *p = strfrmt;
+ while (*p != '\0' && strchr(FLAGS, *p) != NULL) p++; /* skip flags */
+ if ((size_t)(p - strfrmt) >= sizeof(FLAGS)/sizeof(char))
+ luaL_error(L, "invalid format (repeated flags)");
+ if (isdigit(uchar(*p))) p++; /* skip width */
+ if (isdigit(uchar(*p))) p++; /* (2 digits at most) */
+ if (*p == '.') {
+ p++;
+ if (isdigit(uchar(*p))) p++; /* skip precision */
+ if (isdigit(uchar(*p))) p++; /* (2 digits at most) */
+ }
+ if (isdigit(uchar(*p)))
+ luaL_error(L, "invalid format (width or precision too long)");
+ *(form++) = '%';
+ memcpy(form, strfrmt, ((p - strfrmt) + 1) * sizeof(char));
+ form += (p - strfrmt) + 1;
+ *form = '\0';
+ return p;
+}
+
+
+/*
+** add length modifier into formats
+*/
+static void addlenmod (char *form, const char *lenmod) {
+ size_t l = strlen(form);
+ size_t lm = strlen(lenmod);
+ char spec = form[l - 1];
+ strcpy(form + l - 1, lenmod);
+ form[l + lm - 1] = spec;
+ form[l + lm] = '\0';
+}
+
+
+static int str_format (lua_State *L) {
+ int top = lua_gettop(L);
+ int arg = 1;
+ size_t sfl;
+ const char *strfrmt = luaL_checklstring(L, arg, &sfl);
+ const char *strfrmt_end = strfrmt+sfl;
+ luaL_Buffer b;
+ luaL_buffinit(L, &b);
+ while (strfrmt < strfrmt_end) {
+ if (*strfrmt != L_ESC)
+ luaL_addchar(&b, *strfrmt++);
+ else if (*++strfrmt == L_ESC)
+ luaL_addchar(&b, *strfrmt++); /* %% */
+ else { /* format item */
+ char form[MAX_FORMAT]; /* to store the format ('%...') */
+ char *buff = luaL_prepbuffsize(&b, MAX_ITEM); /* to put formatted item */
+ int nb = 0; /* number of bytes in added item */
+ if (++arg > top)
+ luaL_argerror(L, arg, "no value");
+ strfrmt = scanformat(L, strfrmt, form);
+ switch (*strfrmt++) {
+ case 'c': {
+ nb = l_sprintf(buff, MAX_ITEM, form, (int)luaL_checkinteger(L, arg));
+ break;
+ }
+ case 'd': case 'i':
+ case 'o': case 'u': case 'x': case 'X': {
+ lua_Integer n = luaL_checkinteger(L, arg);
+ addlenmod(form, LUA_INTEGER_FRMLEN);
+ nb = l_sprintf(buff, MAX_ITEM, form, (LUAI_UACINT)n);
+ break;
+ }
+ case 'a': case 'A':
+ addlenmod(form, LUA_NUMBER_FRMLEN);
+ nb = lua_number2strx(L, buff, MAX_ITEM, form,
+ luaL_checknumber(L, arg));
+ break;
+ case 'e': case 'E': case 'f':
+ case 'g': case 'G': {
+ lua_Number n = luaL_checknumber(L, arg);
+ addlenmod(form, LUA_NUMBER_FRMLEN);
+ nb = l_sprintf(buff, MAX_ITEM, form, (LUAI_UACNUMBER)n);
+ break;
+ }
+ case 'q': {
+ addliteral(L, &b, arg);
+ break;
+ }
+ case 's': {
+ size_t l;
+ const char *s = luaL_tolstring(L, arg, &l);
+ if (form[2] == '\0') /* no modifiers? */
+ luaL_addvalue(&b); /* keep entire string */
+ else {
+ luaL_argcheck(L, l == strlen(s), arg, "string contains zeros");
+ if (!strchr(form, '.') && l >= 100) {
+ /* no precision and string is too long to be formatted */
+ luaL_addvalue(&b); /* keep entire string */
+ }
+ else { /* format the string into 'buff' */
+ nb = l_sprintf(buff, MAX_ITEM, form, s);
+ lua_pop(L, 1); /* remove result from 'luaL_tolstring' */
+ }
+ }
+ break;
+ }
+ default: { /* also treat cases 'pnLlh' */
+ return luaL_error(L, "invalid option '%%%c' to 'format'",
+ *(strfrmt - 1));
+ }
+ }
+ lua_assert(nb < MAX_ITEM);
+ luaL_addsize(&b, nb);
+ }
+ }
+ luaL_pushresult(&b);
+ return 1;
+}
+
+/* }====================================================== */
+
+
+/*
+** {======================================================
+** PACK/UNPACK
+** =======================================================
+*/
+
+
+/* value used for padding */
+#if !defined(LUAL_PACKPADBYTE)
+#define LUAL_PACKPADBYTE 0x00
+#endif
+
+/* maximum size for the binary representation of an integer */
+#define MAXINTSIZE 16
+
+/* number of bits in a character */
+#define NB CHAR_BIT
+
+/* mask for one character (NB 1's) */
+#define MC ((1 << NB) - 1)
+
+/* size of a lua_Integer */
+#define SZINT ((int)sizeof(lua_Integer))
+
+
+/* dummy union to get native endianness */
+static const union {
+ int dummy;
+ char little; /* true iff machine is little endian */
+} nativeendian = {1};
+
+
+/* dummy structure to get native alignment requirements */
+struct cD {
+ char c;
+ union { double d; void *p; lua_Integer i; lua_Number n; } u;
+};
+
+#define MAXALIGN (offsetof(struct cD, u))
+
+
+/*
+** Union for serializing floats
+*/
+typedef union Ftypes {
+ float f;
+ double d;
+ lua_Number n;
+ char buff[5 * sizeof(lua_Number)]; /* enough for any float type */
+} Ftypes;
+
+
+/*
+** information to pack/unpack stuff
+*/
+typedef struct Header {
+ lua_State *L;
+ int islittle;
+ int maxalign;
+} Header;
+
+
+/*
+** options for pack/unpack
+*/
+typedef enum KOption {
+ Kint, /* signed integers */
+ Kuint, /* unsigned integers */
+ Kfloat, /* floating-point numbers */
+ Kchar, /* fixed-length strings */
+ Kstring, /* strings with prefixed length */
+ Kzstr, /* zero-terminated strings */
+ Kpadding, /* padding */
+ Kpaddalign, /* padding for alignment */
+ Knop /* no-op (configuration or spaces) */
+} KOption;
+
+
+/*
+** Read an integer numeral from string 'fmt' or return 'df' if
+** there is no numeral
+*/
+static int digit (int c) { return '0' <= c && c <= '9'; }
+
+static int getnum (const char **fmt, int df) {
+ if (!digit(**fmt)) /* no number? */
+ return df; /* return default value */
+ else {
+ int a = 0;
+ do {
+ a = a*10 + (*((*fmt)++) - '0');
+ } while (digit(**fmt) && a <= ((int)MAXSIZE - 9)/10);
+ return a;
+ }
+}
+
+
+/*
+** Read an integer numeral and raise an error if it is larger
+** than the maximum size for integers.
+*/
+static int getnumlimit (Header *h, const char **fmt, int df) {
+ int sz = getnum(fmt, df);
+ if (sz > MAXINTSIZE || sz <= 0)
+ luaL_error(h->L, "integral size (%d) out of limits [1,%d]",
+ sz, MAXINTSIZE);
+ return sz;
+}
+
+
+/*
+** Initialize Header
+*/
+static void initheader (lua_State *L, Header *h) {
+ h->L = L;
+ h->islittle = nativeendian.little;
+ h->maxalign = 1;
+}
+
+
+/*
+** Read and classify next option. 'size' is filled with option's size.
+*/
+static KOption getoption (Header *h, const char **fmt, int *size) {
+ int opt = *((*fmt)++);
+ *size = 0; /* default */
+ switch (opt) {
+ case 'b': *size = sizeof(char); return Kint;
+ case 'B': *size = sizeof(char); return Kuint;
+ case 'h': *size = sizeof(short); return Kint;
+ case 'H': *size = sizeof(short); return Kuint;
+ case 'l': *size = sizeof(long); return Kint;
+ case 'L': *size = sizeof(long); return Kuint;
+ case 'j': *size = sizeof(lua_Integer); return Kint;
+ case 'J': *size = sizeof(lua_Integer); return Kuint;
+ case 'T': *size = sizeof(size_t); return Kuint;
+ case 'f': *size = sizeof(float); return Kfloat;
+ case 'd': *size = sizeof(double); return Kfloat;
+ case 'n': *size = sizeof(lua_Number); return Kfloat;
+ case 'i': *size = getnumlimit(h, fmt, sizeof(int)); return Kint;
+ case 'I': *size = getnumlimit(h, fmt, sizeof(int)); return Kuint;
+ case 's': *size = getnumlimit(h, fmt, sizeof(size_t)); return Kstring;
+ case 'c':
+ *size = getnum(fmt, -1);
+ if (*size == -1)
+ luaL_error(h->L, "missing size for format option 'c'");
+ return Kchar;
+ case 'z': return Kzstr;
+ case 'x': *size = 1; return Kpadding;
+ case 'X': return Kpaddalign;
+ case ' ': break;
+ case '<': h->islittle = 1; break;
+ case '>': h->islittle = 0; break;
+ case '=': h->islittle = nativeendian.little; break;
+ case '!': h->maxalign = getnumlimit(h, fmt, MAXALIGN); break;
+ default: luaL_error(h->L, "invalid format option '%c'", opt);
+ }
+ return Knop;
+}
+
+
+/*
+** Read, classify, and fill other details about the next option.
+** 'psize' is filled with the option's size and 'ntoalign' with its
+** alignment requirements.
+** Local variable 'size' gets the size to be aligned. (The Kpaddalign
+** option always gets its full alignment; other options are limited by
+** the maximum alignment ('maxalign'). The Kchar option needs no
+** alignment despite its size.)
+*/
+static KOption getdetails (Header *h, size_t totalsize,
+ const char **fmt, int *psize, int *ntoalign) {
+ KOption opt = getoption(h, fmt, psize);
+ int align = *psize; /* usually, alignment follows size */
+ if (opt == Kpaddalign) { /* 'X' gets alignment from following option */
+ if (**fmt == '\0' || getoption(h, fmt, &align) == Kchar || align == 0)
+ luaL_argerror(h->L, 1, "invalid next option for option 'X'");
+ }
+ if (align <= 1 || opt == Kchar) /* need no alignment? */
+ *ntoalign = 0;
+ else {
+ if (align > h->maxalign) /* enforce maximum alignment */
+ align = h->maxalign;
+ if ((align & (align - 1)) != 0) /* is 'align' not a power of 2? */
+ luaL_argerror(h->L, 1, "format asks for alignment not power of 2");
+ *ntoalign = (align - (int)(totalsize & (align - 1))) & (align - 1);
+ }
+ return opt;
+}
+
+
+/*
+** Pack integer 'n' with 'size' bytes and 'islittle' endianness.
+** The final 'if' handles the case when 'size' is larger than
+** the size of a Lua integer, correcting the extra sign-extension
+** bytes if necessary (by default they would be zeros).
+*/
+static void packint (luaL_Buffer *b, lua_Unsigned n,
+ int islittle, int size, int neg) {
+ char *buff = luaL_prepbuffsize(b, size);
+ int i;
+ buff[islittle ? 0 : size - 1] = (char)(n & MC); /* first byte */
+ for (i = 1; i < size; i++) {
+ n >>= NB;
+ buff[islittle ? i : size - 1 - i] = (char)(n & MC);
+ }
+ if (neg && size > SZINT) { /* negative number needs sign extension? */
+ for (i = SZINT; i < size; i++) /* correct extra bytes */
+ buff[islittle ? i : size - 1 - i] = (char)MC;
+ }
+ luaL_addsize(b, size); /* add result to buffer */
+}
+
+
+/*
+** Copy 'size' bytes from 'src' to 'dest', correcting endianness if
+** the given 'islittle' differs from the native endianness.
+*/
+static void copywithendian (volatile char *dest, volatile const char *src,
+ int size, int islittle) {
+ if (islittle == nativeendian.little) {
+ while (size-- != 0)
+ *(dest++) = *(src++);
+ }
+ else {
+ dest += size - 1;
+ while (size-- != 0)
+ *(dest--) = *(src++);
+ }
+}
+
+
+static int str_pack (lua_State *L) {
+ luaL_Buffer b;
+ Header h;
+ const char *fmt = luaL_checkstring(L, 1); /* format string */
+ int arg = 1; /* current argument to pack */
+ size_t totalsize = 0; /* accumulate total size of result */
+ initheader(L, &h);
+ lua_pushnil(L); /* mark to separate arguments from string buffer */
+ luaL_buffinit(L, &b);
+ while (*fmt != '\0') {
+ int size, ntoalign;
+ KOption opt = getdetails(&h, totalsize, &fmt, &size, &ntoalign);
+ totalsize += ntoalign + size;
+ while (ntoalign-- > 0)
+ luaL_addchar(&b, LUAL_PACKPADBYTE); /* fill alignment */
+ arg++;
+ switch (opt) {
+ case Kint: { /* signed integers */
+ lua_Integer n = luaL_checkinteger(L, arg);
+ if (size < SZINT) { /* need overflow check? */
+ lua_Integer lim = (lua_Integer)1 << ((size * NB) - 1);
+ luaL_argcheck(L, -lim <= n && n < lim, arg, "integer overflow");
+ }
+ packint(&b, (lua_Unsigned)n, h.islittle, size, (n < 0));
+ break;
+ }
+ case Kuint: { /* unsigned integers */
+ lua_Integer n = luaL_checkinteger(L, arg);
+ if (size < SZINT) /* need overflow check? */
+ luaL_argcheck(L, (lua_Unsigned)n < ((lua_Unsigned)1 << (size * NB)),
+ arg, "unsigned overflow");
+ packint(&b, (lua_Unsigned)n, h.islittle, size, 0);
+ break;
+ }
+ case Kfloat: { /* floating-point options */
+ volatile Ftypes u;
+ char *buff = luaL_prepbuffsize(&b, size);
+ lua_Number n = luaL_checknumber(L, arg); /* get argument */
+ if (size == sizeof(u.f)) u.f = (float)n; /* copy it into 'u' */
+ else if (size == sizeof(u.d)) u.d = (double)n;
+ else u.n = n;
+ /* move 'u' to final result, correcting endianness if needed */
+ copywithendian(buff, u.buff, size, h.islittle);
+ luaL_addsize(&b, size);
+ break;
+ }
+ case Kchar: { /* fixed-size string */
+ size_t len;
+ const char *s = luaL_checklstring(L, arg, &len);
+ luaL_argcheck(L, len <= (size_t)size, arg,
+ "string longer than given size");
+ luaL_addlstring(&b, s, len); /* add string */
+ while (len++ < (size_t)size) /* pad extra space */
+ luaL_addchar(&b, LUAL_PACKPADBYTE);
+ break;
+ }
+ case Kstring: { /* strings with length count */
+ size_t len;
+ const char *s = luaL_checklstring(L, arg, &len);
+ luaL_argcheck(L, size >= (int)sizeof(size_t) ||
+ len < ((size_t)1 << (size * NB)),
+ arg, "string length does not fit in given size");
+ packint(&b, (lua_Unsigned)len, h.islittle, size, 0); /* pack length */
+ luaL_addlstring(&b, s, len);
+ totalsize += len;
+ break;
+ }
+ case Kzstr: { /* zero-terminated string */
+ size_t len;
+ const char *s = luaL_checklstring(L, arg, &len);
+ luaL_argcheck(L, strlen(s) == len, arg, "string contains zeros");
+ luaL_addlstring(&b, s, len);
+ luaL_addchar(&b, '\0'); /* add zero at the end */
+ totalsize += len + 1;
+ break;
+ }
+ case Kpadding: luaL_addchar(&b, LUAL_PACKPADBYTE); /* FALLTHROUGH */
+ case Kpaddalign: case Knop:
+ arg--; /* undo increment */
+ break;
+ }
+ }
+ luaL_pushresult(&b);
+ return 1;
+}
+
+
+static int str_packsize (lua_State *L) {
+ Header h;
+ const char *fmt = luaL_checkstring(L, 1); /* format string */
+ size_t totalsize = 0; /* accumulate total size of result */
+ initheader(L, &h);
+ while (*fmt != '\0') {
+ int size, ntoalign;
+ KOption opt = getdetails(&h, totalsize, &fmt, &size, &ntoalign);
+ size += ntoalign; /* total space used by option */
+ luaL_argcheck(L, totalsize <= MAXSIZE - size, 1,
+ "format result too large");
+ totalsize += size;
+ switch (opt) {
+ case Kstring: /* strings with length count */
+ case Kzstr: /* zero-terminated string */
+ luaL_argerror(L, 1, "variable-length format");
+ /* call never returns, but to avoid warnings: *//* FALLTHROUGH */
+ default: break;
+ }
+ }
+ lua_pushinteger(L, (lua_Integer)totalsize);
+ return 1;
+}
+
+
+/*
+** Unpack an integer with 'size' bytes and 'islittle' endianness.
+** If 'size' is smaller than the size of a Lua integer and the integer
+** is signed, it must do sign extension (propagating the sign to the
+** higher bits); if 'size' is larger than the size of a Lua integer,
+** it must check the unread bytes to make sure they do not cause an
+** overflow.
+*/
+static lua_Integer unpackint (lua_State *L, const char *str,
+ int islittle, int size, int issigned) {
+ lua_Unsigned res = 0;
+ int i;
+ int limit = (size <= SZINT) ? size : SZINT;
+ for (i = limit - 1; i >= 0; i--) {
+ res <<= NB;
+ res |= (lua_Unsigned)(unsigned char)str[islittle ? i : size - 1 - i];
+ }
+ if (size < SZINT) { /* real size smaller than lua_Integer? */
+ if (issigned) { /* needs sign extension? */
+ lua_Unsigned mask = (lua_Unsigned)1 << (size*NB - 1);
+ res = ((res ^ mask) - mask); /* do sign extension */
+ }
+ }
+ else if (size > SZINT) { /* must check unread bytes */
+ int mask = (!issigned || (lua_Integer)res >= 0) ? 0 : MC;
+ for (i = limit; i < size; i++) {
+ if ((unsigned char)str[islittle ? i : size - 1 - i] != mask)
+ luaL_error(L, "%d-byte integer does not fit into Lua Integer", size);
+ }
+ }
+ return (lua_Integer)res;
+}
+
+
+static int str_unpack (lua_State *L) {
+ Header h;
+ const char *fmt = luaL_checkstring(L, 1);
+ size_t ld;
+ const char *data = luaL_checklstring(L, 2, &ld);
+ size_t pos = (size_t)posrelat(luaL_optinteger(L, 3, 1), ld) - 1;
+ int n = 0; /* number of results */
+ luaL_argcheck(L, pos <= ld, 3, "initial position out of string");
+ initheader(L, &h);
+ while (*fmt != '\0') {
+ int size, ntoalign;
+ KOption opt = getdetails(&h, pos, &fmt, &size, &ntoalign);
+ if ((size_t)ntoalign + size > ~pos || pos + ntoalign + size > ld)
+ luaL_argerror(L, 2, "data string too short");
+ pos += ntoalign; /* skip alignment */
+ /* stack space for item + next position */
+ luaL_checkstack(L, 2, "too many results");
+ n++;
+ switch (opt) {
+ case Kint:
+ case Kuint: {
+ lua_Integer res = unpackint(L, data + pos, h.islittle, size,
+ (opt == Kint));
+ lua_pushinteger(L, res);
+ break;
+ }
+ case Kfloat: {
+ volatile Ftypes u;
+ lua_Number num;
+ copywithendian(u.buff, data + pos, size, h.islittle);
+ if (size == sizeof(u.f)) num = (lua_Number)u.f;
+ else if (size == sizeof(u.d)) num = (lua_Number)u.d;
+ else num = u.n;
+ lua_pushnumber(L, num);
+ break;
+ }
+ case Kchar: {
+ lua_pushlstring(L, data + pos, size);
+ break;
+ }
+ case Kstring: {
+ size_t len = (size_t)unpackint(L, data + pos, h.islittle, size, 0);
+ luaL_argcheck(L, pos + len + size <= ld, 2, "data string too short");
+ lua_pushlstring(L, data + pos + size, len);
+ pos += len; /* skip string */
+ break;
+ }
+ case Kzstr: {
+ size_t len = (int)strlen(data + pos);
+ lua_pushlstring(L, data + pos, len);
+ pos += len + 1; /* skip string plus final '\0' */
+ break;
+ }
+ case Kpaddalign: case Kpadding: case Knop:
+ n--; /* undo increment */
+ break;
+ }
+ pos += size;
+ }
+ lua_pushinteger(L, pos + 1); /* next position */
+ return n + 1;
+}
+
+/* }====================================================== */
+
+
+static const luaL_Reg strlib[] = {
+ {"byte", str_byte},
+ {"char", str_char},
+ {"dump", str_dump},
+ {"find", str_find},
+ {"format", str_format},
+ {"gmatch", gmatch},
+ {"gsub", str_gsub},
+ {"len", str_len},
+ {"lower", str_lower},
+ {"match", str_match},
+ {"rep", str_rep},
+ {"reverse", str_reverse},
+ {"sub", str_sub},
+ {"upper", str_upper},
+ {"pack", str_pack},
+ {"packsize", str_packsize},
+ {"unpack", str_unpack},
+ {NULL, NULL}
+};
+
+
+static void createmetatable (lua_State *L) {
+ lua_createtable(L, 0, 1); /* table to be metatable for strings */
+ lua_pushliteral(L, ""); /* dummy string */
+ lua_pushvalue(L, -2); /* copy table */
+ lua_setmetatable(L, -2); /* set table as metatable for strings */
+ lua_pop(L, 1); /* pop dummy string */
+ lua_pushvalue(L, -2); /* get string library */
+ lua_setfield(L, -2, "__index"); /* metatable.__index = string */
+ lua_pop(L, 1); /* pop metatable */
+}
+
+
+/*
+** Open string library
+*/
+LUAMOD_API int luaopen_string (lua_State *L) {
+ luaL_newlib(L, strlib);
+ createmetatable(L);
+ return 1;
+}
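
createmetatable above installs the string table as the __index of the shared metatable for strings; a minimal embedding sketch (assuming a host program linked against this Lua 5.3 source) shows the effect, namely method-call syntax on plain strings:

#include <stdio.h>
#include "lua.h"
#include "lualib.h"
#include "lauxlib.h"

int main(void) {
  lua_State *L = luaL_newstate();
  luaL_openlibs(L);                        /* opens string and installs its metatable */
  if (luaL_dostring(L, "return ('lpeg'):upper()") == LUA_OK)
    printf("%s\n", lua_tostring(L, -1));   /* prints: LPEG */
  lua_close(L);
  return 0;
}
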
+
diff --git a/lua/src/ltable.c b/lua/src/ltable.c
new file mode 100644
index 000000000..d080189f2
--- /dev/null
+++ b/lua/src/ltable.c
@@ -0,0 +1,669 @@
+/*
+** $Id: ltable.c,v 2.118 2016/11/07 12:38:35 roberto Exp $
+** Lua tables (hash)
+** See Copyright Notice in lua.h
+*/
+
+#define ltable_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+/*
+** Implementation of tables (aka arrays, objects, or hash tables).
+** Tables keep their elements in two parts: an array part and a hash part.
+** Non-negative integer keys are all candidates to be kept in the array
+** part. The actual size of the array is the largest 'n' such that
+** more than half the slots between 1 and n are in use.
+** Hash uses a mix of chained scatter table with Brent's variation.
+** A main invariant of these tables is that, if an element is not
+** in its main position (i.e. the 'original' position that its hash gives
+** to it), then the colliding element is in its own main position.
+** Hence even when the load factor reaches 100%, performance remains good.
+*/
+
+#include <math.h>
+#include <limits.h>
+
+#include "lua.h"
+
+#include "ldebug.h"
+#include "ldo.h"
+#include "lgc.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "lvm.h"
+
+
+/*
+** Maximum size of array part (MAXASIZE) is 2^MAXABITS. MAXABITS is
+** the largest integer such that MAXASIZE fits in an unsigned int.
+*/
+#define MAXABITS cast_int(sizeof(int) * CHAR_BIT - 1)
+#define MAXASIZE (1u << MAXABITS)
+
+/*
+** Maximum size of hash part is 2^MAXHBITS. MAXHBITS is the largest
+** integer such that 2^MAXHBITS fits in a signed int. (Note that the
+** maximum number of elements in a table, 2^MAXABITS + 2^MAXHBITS, still
+** fits comfortably in an unsigned int.)
+*/
+#define MAXHBITS (MAXABITS - 1)
+
+
+#define hashpow2(t,n) (gnode(t, lmod((n), sizenode(t))))
+
+#define hashstr(t,str) hashpow2(t, (str)->hash)
+#define hashboolean(t,p) hashpow2(t, p)
+#define hashint(t,i) hashpow2(t, i)
+
+
+/*
+** for some types, it is better to avoid modulus by power of 2, as
+** they tend to have many 2 factors.
+*/
+#define hashmod(t,n) (gnode(t, ((n) % ((sizenode(t)-1)|1))))
+
+
+#define hashpointer(t,p) hashmod(t, point2uint(p))
+
+
+#define dummynode (&dummynode_)
+
+static const Node dummynode_ = {
+ {NILCONSTANT}, /* value */
+ {{NILCONSTANT, 0}} /* key */
+};
+
+
+/*
+** Hash for floating-point numbers.
+** The main computation should be just
+** n = frexp(n, &i); return (n * INT_MAX) + i
+** but there are some numerical subtleties.
+** In a two's-complement representation, INT_MAX does not have an exact
+** representation as a float, but INT_MIN does; because the absolute
+** value of 'frexp' is smaller than 1 (unless 'n' is inf/NaN), the
+** absolute value of the product 'frexp * -INT_MIN' is smaller than or
+** equal to INT_MAX. Next, the use of 'unsigned int' avoids overflows when
+** adding 'i'; the use of '~u' (instead of '-u') avoids problems with
+** INT_MIN.
+*/
+#if !defined(l_hashfloat)
+static int l_hashfloat (lua_Number n) {
+ int i;
+ lua_Integer ni;
+ n = l_mathop(frexp)(n, &i) * -cast_num(INT_MIN);
+ if (!lua_numbertointeger(n, &ni)) { /* is 'n' inf/-inf/NaN? */
+ lua_assert(luai_numisnan(n) || l_mathop(fabs)(n) == cast_num(HUGE_VAL));
+ return 0;
+ }
+ else { /* normal case */
+ unsigned int u = cast(unsigned int, i) + cast(unsigned int, ni);
+ return cast_int(u <= cast(unsigned int, INT_MAX) ? u : ~u);
+ }
+}
+#endif
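
A hypothetical standalone re-creation of the frexp-based hashing idea above, using plain double and int instead of Lua's configured number types (illustrative only):

#include <stdio.h>
#include <math.h>
#include <limits.h>

static unsigned int hashfloat_sketch(double n) {
  int exp;
  double frac = frexp(n, &exp) * -(double)INT_MIN;  /* |frac| < 2^31 */
  if (frac != frac || fabs(frac) == HUGE_VAL)       /* NaN or infinity */
    return 0;
  {
    unsigned int u = (unsigned int)exp + (unsigned int)(long long)frac;
    return (u <= (unsigned int)INT_MAX) ? u : ~u;   /* ~u avoids INT_MIN issues */
  }
}

int main(void) {
  double xs[] = {0.0, 1.0, 3.1416, -2.5, 1e300};
  for (int i = 0; i < 5; i++)
    printf("%g -> %u\n", xs[i], hashfloat_sketch(xs[i]));
  return 0;
}
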
+
+
+/*
+** returns the 'main' position of an element in a table (that is, the index
+** of its hash value)
+*/
+static Node *mainposition (const Table *t, const TValue *key) {
+ switch (ttype(key)) {
+ case LUA_TNUMINT:
+ return hashint(t, ivalue(key));
+ case LUA_TNUMFLT:
+ return hashmod(t, l_hashfloat(fltvalue(key)));
+ case LUA_TSHRSTR:
+ return hashstr(t, tsvalue(key));
+ case LUA_TLNGSTR:
+ return hashpow2(t, luaS_hashlongstr(tsvalue(key)));
+ case LUA_TBOOLEAN:
+ return hashboolean(t, bvalue(key));
+ case LUA_TLIGHTUSERDATA:
+ return hashpointer(t, pvalue(key));
+ case LUA_TLCF:
+ return hashpointer(t, fvalue(key));
+ default:
+ lua_assert(!ttisdeadkey(key));
+ return hashpointer(t, gcvalue(key));
+ }
+}
+
+
+/*
+** returns the index for 'key' if 'key' is an appropriate key to live in
+** the array part of the table, 0 otherwise.
+*/
+static unsigned int arrayindex (const TValue *key) {
+ if (ttisinteger(key)) {
+ lua_Integer k = ivalue(key);
+ if (0 < k && (lua_Unsigned)k <= MAXASIZE)
+ return cast(unsigned int, k); /* 'key' is an appropriate array index */
+ }
+ return 0; /* 'key' did not match some condition */
+}
+
+
+/*
+** returns the index of a 'key' for table traversals. First goes all
+** elements in the array part, then elements in the hash part. The
+** beginning of a traversal is signaled by 0.
+*/
+static unsigned int findindex (lua_State *L, Table *t, StkId key) {
+ unsigned int i;
+ if (ttisnil(key)) return 0; /* first iteration */
+ i = arrayindex(key);
+ if (i != 0 && i <= t->sizearray) /* is 'key' inside array part? */
+ return i; /* yes; that's the index */
+ else {
+ int nx;
+ Node *n = mainposition(t, key);
+ for (;;) { /* check whether 'key' is somewhere in the chain */
+ /* key may be dead already, but it is ok to use it in 'next' */
+ if (luaV_rawequalobj(gkey(n), key) ||
+ (ttisdeadkey(gkey(n)) && iscollectable(key) &&
+ deadvalue(gkey(n)) == gcvalue(key))) {
+ i = cast_int(n - gnode(t, 0)); /* key index in hash table */
+ /* hash elements are numbered after array ones */
+ return (i + 1) + t->sizearray;
+ }
+ nx = gnext(n);
+ if (nx == 0)
+ luaG_runerror(L, "invalid key to 'next'"); /* key not found */
+ else n += nx;
+ }
+ }
+}
+
+
+int luaH_next (lua_State *L, Table *t, StkId key) {
+ unsigned int i = findindex(L, t, key); /* find original element */
+ for (; i < t->sizearray; i++) { /* try first array part */
+ if (!ttisnil(&t->array[i])) { /* a non-nil value? */
+ setivalue(key, i + 1);
+ setobj2s(L, key+1, &t->array[i]);
+ return 1;
+ }
+ }
+ for (i -= t->sizearray; cast_int(i) < sizenode(t); i++) { /* hash part */
+ if (!ttisnil(gval(gnode(t, i)))) { /* a non-nil value? */
+ setobj2s(L, key, gkey(gnode(t, i)));
+ setobj2s(L, key+1, gval(gnode(t, i)));
+ return 1;
+ }
+ }
+ return 0; /* no more elements */
+}
+
+
+/*
+** {=============================================================
+** Rehash
+** ==============================================================
+*/
+
+/*
+** Compute the optimal size for the array part of table 't'. 'nums' is a
+** "count array" where 'nums[i]' is the number of integers in the table
+** between 2^(i - 1) + 1 and 2^i. 'pna' enters with the total number of
+** integer keys in the table and leaves with the number of keys that
+** will go to the array part; return the optimal size.
+*/
+static unsigned int computesizes (unsigned int nums[], unsigned int *pna) {
+ int i;
+ unsigned int twotoi; /* 2^i (candidate for optimal size) */
+ unsigned int a = 0; /* number of elements smaller than 2^i */
+ unsigned int na = 0; /* number of elements to go to array part */
+ unsigned int optimal = 0; /* optimal size for array part */
+ /* loop while keys can fill more than half of total size */
+ for (i = 0, twotoi = 1; *pna > twotoi / 2; i++, twotoi *= 2) {
+ if (nums[i] > 0) {
+ a += nums[i];
+ if (a > twotoi/2) { /* more than half elements present? */
+ optimal = twotoi; /* optimal size (till now) */
+ na = a; /* all elements up to 'optimal' will go to array part */
+ }
+ }
+ }
+ lua_assert((optimal == 0 || optimal / 2 < na) && na <= optimal);
+ *pna = na;
+ return optimal;
+}
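
The sizing rule can be sketched standalone (sample keys and helper names are illustrative): nums[i] counts keys in the range (2^(i-1), 2^i], and the chosen array size is the largest power of two that would end up more than half full:

#include <stdio.h>

#define MAXBITS 31

/* ceil(log2(x)) for x >= 1, mirroring luaO_ceillog2 */
static unsigned int ceillog2_sketch(unsigned int x) {
  unsigned int l = 0;
  x--;
  while (x) { x >>= 1; l++; }
  return l;
}

int main(void) {
  unsigned int keys[] = {1, 2, 3, 5, 20, 100};  /* sample integer keys */
  unsigned int total = sizeof(keys) / sizeof(keys[0]);
  unsigned int nums[MAXBITS + 1] = {0};
  unsigned int i, twotoi, a = 0, na = 0, optimal = 0;
  for (i = 0; i < total; i++)
    nums[ceillog2_sketch(keys[i])]++;           /* build the count array */
  for (i = 0, twotoi = 1; total > twotoi / 2; i++, twotoi *= 2) {
    if (nums[i] > 0) {
      a += nums[i];                             /* keys that fit below 2^i */
      if (a > twotoi / 2) { optimal = twotoi; na = a; }
    }
  }
  printf("array part size %u holds %u of %u integer keys\n", optimal, na, total);
  return 0;                                     /* prints: 4 holds 3 of 6 */
}
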
+
+
+static int countint (const TValue *key, unsigned int *nums) {
+ unsigned int k = arrayindex(key);
+ if (k != 0) { /* is 'key' an appropriate array index? */
+ nums[luaO_ceillog2(k)]++; /* count as such */
+ return 1;
+ }
+ else
+ return 0;
+}
+
+
+/*
+** Count keys in array part of table 't': Fill 'nums[i]' with
+** number of keys that will go into corresponding slice and return
+** total number of non-nil keys.
+*/
+static unsigned int numusearray (const Table *t, unsigned int *nums) {
+ int lg;
+ unsigned int ttlg; /* 2^lg */
+ unsigned int ause = 0; /* summation of 'nums' */
+ unsigned int i = 1; /* count to traverse all array keys */
+ /* traverse each slice */
+ for (lg = 0, ttlg = 1; lg <= MAXABITS; lg++, ttlg *= 2) {
+ unsigned int lc = 0; /* counter */
+ unsigned int lim = ttlg;
+ if (lim > t->sizearray) {
+ lim = t->sizearray; /* adjust upper limit */
+ if (i > lim)
+ break; /* no more elements to count */
+ }
+ /* count elements in range (2^(lg - 1), 2^lg] */
+ for (; i <= lim; i++) {
+ if (!ttisnil(&t->array[i-1]))
+ lc++;
+ }
+ nums[lg] += lc;
+ ause += lc;
+ }
+ return ause;
+}
+
+
+static int numusehash (const Table *t, unsigned int *nums, unsigned int *pna) {
+ int totaluse = 0; /* total number of elements */
+ int ause = 0; /* elements added to 'nums' (can go to array part) */
+ int i = sizenode(t);
+ while (i--) {
+ Node *n = &t->node[i];
+ if (!ttisnil(gval(n))) {
+ ause += countint(gkey(n), nums);
+ totaluse++;
+ }
+ }
+ *pna += ause;
+ return totaluse;
+}
+
+
+static void setarrayvector (lua_State *L, Table *t, unsigned int size) {
+ unsigned int i;
+ luaM_reallocvector(L, t->array, t->sizearray, size, TValue);
+ for (i=t->sizearray; i<size; i++)
+ setnilvalue(&t->array[i]);
+ t->sizearray = size;
+}
+
+
+static void setnodevector (lua_State *L, Table *t, unsigned int size) {
+ if (size == 0) { /* no elements to hash part? */
+ t->node = cast(Node *, dummynode); /* use common 'dummynode' */
+ t->lsizenode = 0;
+ t->lastfree = NULL; /* signal that it is using dummy node */
+ }
+ else {
+ int i;
+ int lsize = luaO_ceillog2(size);
+ if (lsize > MAXHBITS)
+ luaG_runerror(L, "table overflow");
+ size = twoto(lsize);
+ t->node = luaM_newvector(L, size, Node);
+ for (i = 0; i < (int)size; i++) {
+ Node *n = gnode(t, i);
+ gnext(n) = 0;
+ setnilvalue(wgkey(n));
+ setnilvalue(gval(n));
+ }
+ t->lsizenode = cast_byte(lsize);
+ t->lastfree = gnode(t, size); /* all positions are free */
+ }
+}
+
+
+void luaH_resize (lua_State *L, Table *t, unsigned int nasize,
+ unsigned int nhsize) {
+ unsigned int i;
+ int j;
+ unsigned int oldasize = t->sizearray;
+ int oldhsize = allocsizenode(t);
+ Node *nold = t->node; /* save old hash ... */
+ if (nasize > oldasize) /* array part must grow? */
+ setarrayvector(L, t, nasize);
+ /* create new hash part with appropriate size */
+ setnodevector(L, t, nhsize);
+ if (nasize < oldasize) { /* array part must shrink? */
+ t->sizearray = nasize;
+ /* re-insert elements from vanishing slice */
+ for (i=nasize; i<oldasize; i++) {
+ if (!ttisnil(&t->array[i]))
+ luaH_setint(L, t, i + 1, &t->array[i]);
+ }
+ /* shrink array */
+ luaM_reallocvector(L, t->array, oldasize, nasize, TValue);
+ }
+ /* re-insert elements from hash part */
+ for (j = oldhsize - 1; j >= 0; j--) {
+ Node *old = nold + j;
+ if (!ttisnil(gval(old))) {
+ /* doesn't need barrier/invalidate cache, as entry was
+ already present in the table */
+ setobjt2t(L, luaH_set(L, t, gkey(old)), gval(old));
+ }
+ }
+ if (oldhsize > 0) /* not the dummy node? */
+ luaM_freearray(L, nold, cast(size_t, oldhsize)); /* free old hash */
+}
+
+
+void luaH_resizearray (lua_State *L, Table *t, unsigned int nasize) {
+ int nsize = allocsizenode(t);
+ luaH_resize(L, t, nasize, nsize);
+}
+
+/*
+** nums[i] = number of keys 'k' where 2^(i - 1) < k <= 2^i
+*/
+static void rehash (lua_State *L, Table *t, const TValue *ek) {
+ unsigned int asize; /* optimal size for array part */
+ unsigned int na; /* number of keys in the array part */
+ unsigned int nums[MAXABITS + 1];
+ int i;
+ int totaluse;
+ for (i = 0; i <= MAXABITS; i++) nums[i] = 0; /* reset counts */
+ na = numusearray(t, nums); /* count keys in array part */
+ totaluse = na; /* all those keys are integer keys */
+ totaluse += numusehash(t, nums, &na); /* count keys in hash part */
+ /* count extra key */
+ na += countint(ek, nums);
+ totaluse++;
+ /* compute new size for array part */
+ asize = computesizes(nums, &na);
+ /* resize the table to new computed sizes */
+ luaH_resize(L, t, asize, totaluse - na);
+}
+
+
+
+/*
+** }=============================================================
+*/
+
+
+Table *luaH_new (lua_State *L) {
+ GCObject *o = luaC_newobj(L, LUA_TTABLE, sizeof(Table));
+ Table *t = gco2t(o);
+ t->metatable = NULL;
+ t->flags = cast_byte(~0);
+ t->array = NULL;
+ t->sizearray = 0;
+ setnodevector(L, t, 0);
+ return t;
+}
+
+
+void luaH_free (lua_State *L, Table *t) {
+ if (!isdummy(t))
+ luaM_freearray(L, t->node, cast(size_t, sizenode(t)));
+ luaM_freearray(L, t->array, t->sizearray);
+ luaM_free(L, t);
+}
+
+
+static Node *getfreepos (Table *t) {
+ if (!isdummy(t)) {
+ while (t->lastfree > t->node) {
+ t->lastfree--;
+ if (ttisnil(gkey(t->lastfree)))
+ return t->lastfree;
+ }
+ }
+ return NULL; /* could not find a free place */
+}
+
+
+
+/*
+** inserts a new key into a hash table; first, check whether key's main
+** position is free. If not, check whether colliding node is in its main
+** position or not: if it is not, move colliding node to an empty place and
+** put new key in its main position; otherwise (colliding node is in its main
+** position), new key goes to an empty position.
+*/
+TValue *luaH_newkey (lua_State *L, Table *t, const TValue *key) {
+ Node *mp;
+ TValue aux;
+ if (ttisnil(key)) luaG_runerror(L, "table index is nil");
+ else if (ttisfloat(key)) {
+ lua_Integer k;
+ if (luaV_tointeger(key, &k, 0)) { /* does index fit in an integer? */
+ setivalue(&aux, k);
+ key = &aux; /* insert it as an integer */
+ }
+ else if (luai_numisnan(fltvalue(key)))
+ luaG_runerror(L, "table index is NaN");
+ }
+ mp = mainposition(t, key);
+ if (!ttisnil(gval(mp)) || isdummy(t)) { /* main position is taken? */
+ Node *othern;
+ Node *f = getfreepos(t); /* get a free place */
+ if (f == NULL) { /* cannot find a free place? */
+ rehash(L, t, key); /* grow table */
+ /* whatever called 'newkey' takes care of TM cache */
+ return luaH_set(L, t, key); /* insert key into grown table */
+ }
+ lua_assert(!isdummy(t));
+ othern = mainposition(t, gkey(mp));
+ if (othern != mp) { /* is colliding node out of its main position? */
+ /* yes; move colliding node into free position */
+ while (othern + gnext(othern) != mp) /* find previous */
+ othern += gnext(othern);
+ gnext(othern) = cast_int(f - othern); /* rechain to point to 'f' */
+ *f = *mp; /* copy colliding node into free pos. (mp->next also goes) */
+ if (gnext(mp) != 0) {
+ gnext(f) += cast_int(mp - f); /* correct 'next' */
+ gnext(mp) = 0; /* now 'mp' is free */
+ }
+ setnilvalue(gval(mp));
+ }
+ else { /* colliding node is in its own main position */
+ /* new node will go into free position */
+ if (gnext(mp) != 0)
+ gnext(f) = cast_int((mp + gnext(mp)) - f); /* chain new position */
+ else lua_assert(gnext(f) == 0);
+ gnext(mp) = cast_int(f - mp);
+ mp = f;
+ }
+ }
+ setnodekey(L, &mp->i_key, key);
+ luaC_barrierback(L, t, key);
+ lua_assert(ttisnil(gval(mp)));
+ return gval(mp);
+}
+
+
+/*
+** search function for integers
+*/
+const TValue *luaH_getint (Table *t, lua_Integer key) {
+ /* (1 <= key && key <= t->sizearray) */
+ if (l_castS2U(key) - 1 < t->sizearray)
+ return &t->array[key - 1];
+ else {
+ Node *n = hashint(t, key);
+ for (;;) { /* check whether 'key' is somewhere in the chain */
+ if (ttisinteger(gkey(n)) && ivalue(gkey(n)) == key)
+ return gval(n); /* that's it */
+ else {
+ int nx = gnext(n);
+ if (nx == 0) break;
+ n += nx;
+ }
+ }
+ return luaO_nilobject;
+ }
+}
+
+
+/*
+** search function for short strings
+*/
+const TValue *luaH_getshortstr (Table *t, TString *key) {
+ Node *n = hashstr(t, key);
+ lua_assert(key->tt == LUA_TSHRSTR);
+ for (;;) { /* check whether 'key' is somewhere in the chain */
+ const TValue *k = gkey(n);
+ if (ttisshrstring(k) && eqshrstr(tsvalue(k), key))
+ return gval(n); /* that's it */
+ else {
+ int nx = gnext(n);
+ if (nx == 0)
+ return luaO_nilobject; /* not found */
+ n += nx;
+ }
+ }
+}
+
+
+/*
+** "Generic" get version. (Not that generic: not valid for integers,
+** which may be in array part, nor for floats with integral values.)
+*/
+static const TValue *getgeneric (Table *t, const TValue *key) {
+ Node *n = mainposition(t, key);
+ for (;;) { /* check whether 'key' is somewhere in the chain */
+ if (luaV_rawequalobj(gkey(n), key))
+ return gval(n); /* that's it */
+ else {
+ int nx = gnext(n);
+ if (nx == 0)
+ return luaO_nilobject; /* not found */
+ n += nx;
+ }
+ }
+}
+
+
+const TValue *luaH_getstr (Table *t, TString *key) {
+ if (key->tt == LUA_TSHRSTR)
+ return luaH_getshortstr(t, key);
+ else { /* for long strings, use generic case */
+ TValue ko;
+ setsvalue(cast(lua_State *, NULL), &ko, key);
+ return getgeneric(t, &ko);
+ }
+}
+
+
+/*
+** main search function
+*/
+const TValue *luaH_get (Table *t, const TValue *key) {
+ switch (ttype(key)) {
+ case LUA_TSHRSTR: return luaH_getshortstr(t, tsvalue(key));
+ case LUA_TNUMINT: return luaH_getint(t, ivalue(key));
+ case LUA_TNIL: return luaO_nilobject;
+ case LUA_TNUMFLT: {
+ lua_Integer k;
+ if (luaV_tointeger(key, &k, 0)) /* index is int? */
+ return luaH_getint(t, k); /* use specialized version */
+ /* else... */
+ } /* FALLTHROUGH */
+ default:
+ return getgeneric(t, key);
+ }
+}
+
+
+/*
+** beware: when using this function you probably need to check a GC
+** barrier and invalidate the TM cache.
+*/
+TValue *luaH_set (lua_State *L, Table *t, const TValue *key) {
+ const TValue *p = luaH_get(t, key);
+ if (p != luaO_nilobject)
+ return cast(TValue *, p);
+ else return luaH_newkey(L, t, key);
+}
+
+
+void luaH_setint (lua_State *L, Table *t, lua_Integer key, TValue *value) {
+ const TValue *p = luaH_getint(t, key);
+ TValue *cell;
+ if (p != luaO_nilobject)
+ cell = cast(TValue *, p);
+ else {
+ TValue k;
+ setivalue(&k, key);
+ cell = luaH_newkey(L, t, &k);
+ }
+ setobj2t(L, cell, value);
+}
+
+
+static int unbound_search (Table *t, unsigned int j) {
+ unsigned int i = j; /* i is zero or a present index */
+ j++;
+ /* find 'i' and 'j' such that i is present and j is not */
+ while (!ttisnil(luaH_getint(t, j))) {
+ i = j;
+ if (j > cast(unsigned int, MAX_INT)/2) { /* overflow? */
+ /* table was built with bad purposes: resort to linear search */
+ i = 1;
+ while (!ttisnil(luaH_getint(t, i))) i++;
+ return i - 1;
+ }
+ j *= 2;
+ }
+ /* now do a binary search between them */
+ while (j - i > 1) {
+ unsigned int m = (i+j)/2;
+ if (ttisnil(luaH_getint(t, m))) j = m;
+ else i = m;
+ }
+ return i;
+}
+
+
+/*
+** Try to find a boundary in table 't'. A 'boundary' is an integer index
+** such that t[i] is non-nil and t[i+1] is nil (and 0 if t[1] is nil).
+*/
+int luaH_getn (Table *t) {
+ unsigned int j = t->sizearray;
+ if (j > 0 && ttisnil(&t->array[j - 1])) {
+ /* there is a boundary in the array part: (binary) search for it */
+ unsigned int i = 0;
+ while (j - i > 1) {
+ unsigned int m = (i+j)/2;
+ if (ttisnil(&t->array[m - 1])) j = m;
+ else i = m;
+ }
+ return i;
+ }
+ /* else must find a boundary in hash part */
+ else if (isdummy(t)) /* hash part is empty? */
+ return j; /* that is easy... */
+ else return unbound_search(t, j);
+}
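
luaH_getn is what backs the '#' operator (absent a __len metamethod) and lua_rawlen for tables. A minimal host-side illustration (assuming a program linked against this Lua source) of why the result becomes unspecified once the array part has a nil hole:

#include <stdio.h>
#include "lua.h"
#include "lauxlib.h"

int main(void) {
  lua_State *L = luaL_newstate();
  lua_createtable(L, 4, 0);                      /* table with array space for 4 */
  for (int i = 1; i <= 4; i++) {
    lua_pushinteger(L, i * 10);
    lua_rawseti(L, -2, i);                       /* t[i] = i * 10 */
  }
  printf("#t = %d\n", (int)lua_rawlen(L, -1));   /* 4: no holes */
  lua_pushnil(L);
  lua_rawseti(L, -2, 3);                         /* t[3] = nil punches a hole */
  /* both 2 and 4 are now valid boundaries; either may be returned */
  printf("#t = %d\n", (int)lua_rawlen(L, -1));
  lua_close(L);
  return 0;
}
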
+
+
+
+#if defined(LUA_DEBUG)
+
+Node *luaH_mainposition (const Table *t, const TValue *key) {
+ return mainposition(t, key);
+}
+
+int luaH_isdummy (const Table *t) { return isdummy(t); }
+
+#endif
diff --git a/lua/src/ltable.h b/lua/src/ltable.h
new file mode 100644
index 000000000..6da9024fe
--- /dev/null
+++ b/lua/src/ltable.h
@@ -0,0 +1,66 @@
+/*
+** $Id: ltable.h,v 2.23 2016/12/22 13:08:50 roberto Exp $
+** Lua tables (hash)
+** See Copyright Notice in lua.h
+*/
+
+#ifndef ltable_h
+#define ltable_h
+
+#include "lobject.h"
+
+
+#define gnode(t,i) (&(t)->node[i])
+#define gval(n) (&(n)->i_val)
+#define gnext(n) ((n)->i_key.nk.next)
+
+
+/* 'const' to avoid wrong writings that can mess up field 'next' */
+#define gkey(n) cast(const TValue*, (&(n)->i_key.tvk))
+
+/*
+** writable version of 'gkey'; allows updates to individual fields,
+** but not to the whole (which has incompatible type)
+*/
+#define wgkey(n) (&(n)->i_key.nk)
+
+#define invalidateTMcache(t) ((t)->flags = 0)
+
+
+/* true when 't' is using 'dummynode' as its hash part */
+#define isdummy(t) ((t)->lastfree == NULL)
+
+
+/* allocated size for hash nodes */
+#define allocsizenode(t) (isdummy(t) ? 0 : sizenode(t))
+
+
+/* returns the key, given the value of a table entry */
+#define keyfromval(v) \
+ (gkey(cast(Node *, cast(char *, (v)) - offsetof(Node, i_val))))
+
+
+LUAI_FUNC const TValue *luaH_getint (Table *t, lua_Integer key);
+LUAI_FUNC void luaH_setint (lua_State *L, Table *t, lua_Integer key,
+ TValue *value);
+LUAI_FUNC const TValue *luaH_getshortstr (Table *t, TString *key);
+LUAI_FUNC const TValue *luaH_getstr (Table *t, TString *key);
+LUAI_FUNC const TValue *luaH_get (Table *t, const TValue *key);
+LUAI_FUNC TValue *luaH_newkey (lua_State *L, Table *t, const TValue *key);
+LUAI_FUNC TValue *luaH_set (lua_State *L, Table *t, const TValue *key);
+LUAI_FUNC Table *luaH_new (lua_State *L);
+LUAI_FUNC void luaH_resize (lua_State *L, Table *t, unsigned int nasize,
+ unsigned int nhsize);
+LUAI_FUNC void luaH_resizearray (lua_State *L, Table *t, unsigned int nasize);
+LUAI_FUNC void luaH_free (lua_State *L, Table *t);
+LUAI_FUNC int luaH_next (lua_State *L, Table *t, StkId key);
+LUAI_FUNC int luaH_getn (Table *t);
+
+
+#if defined(LUA_DEBUG)
+LUAI_FUNC Node *luaH_mainposition (const Table *t, const TValue *key);
+LUAI_FUNC int luaH_isdummy (const Table *t);
+#endif
+
+
+#endif
diff --git a/lua/src/ltablib.c b/lua/src/ltablib.c
new file mode 100644
index 000000000..98b2f8713
--- /dev/null
+++ b/lua/src/ltablib.c
@@ -0,0 +1,450 @@
+/*
+** $Id: ltablib.c,v 1.93 2016/02/25 19:41:54 roberto Exp $
+** Library for Table Manipulation
+** See Copyright Notice in lua.h
+*/
+
+#define ltablib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <limits.h>
+#include <stddef.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+
+/*
+** Operations that an object must define to mimic a table
+** (some functions only need some of them)
+*/
+#define TAB_R 1 /* read */
+#define TAB_W 2 /* write */
+#define TAB_L 4 /* length */
+#define TAB_RW (TAB_R | TAB_W) /* read/write */
+
+
+#define aux_getn(L,n,w) (checktab(L, n, (w) | TAB_L), luaL_len(L, n))
+
+
+static int checkfield (lua_State *L, const char *key, int n) {
+ lua_pushstring(L, key);
+ return (lua_rawget(L, -n) != LUA_TNIL);
+}
+
+
+/*
+** Check that 'arg' either is a table or can behave like one (that is,
+** has a metatable with the required metamethods)
+*/
+static void checktab (lua_State *L, int arg, int what) {
+ if (lua_type(L, arg) != LUA_TTABLE) { /* is it not a table? */
+ int n = 1; /* number of elements to pop */
+ if (lua_getmetatable(L, arg) && /* must have metatable */
+ (!(what & TAB_R) || checkfield(L, "__index", ++n)) &&
+ (!(what & TAB_W) || checkfield(L, "__newindex", ++n)) &&
+ (!(what & TAB_L) || checkfield(L, "__len", ++n))) {
+ lua_pop(L, n); /* pop metatable and tested metamethods */
+ }
+ else
+ luaL_checktype(L, arg, LUA_TTABLE); /* force an error */
+ }
+}
+
+
+#if defined(LUA_COMPAT_MAXN)
+static int maxn (lua_State *L) {
+ lua_Number max = 0;
+ luaL_checktype(L, 1, LUA_TTABLE);
+ lua_pushnil(L); /* first key */
+ while (lua_next(L, 1)) {
+ lua_pop(L, 1); /* remove value */
+ if (lua_type(L, -1) == LUA_TNUMBER) {
+ lua_Number v = lua_tonumber(L, -1);
+ if (v > max) max = v;
+ }
+ }
+ lua_pushnumber(L, max);
+ return 1;
+}
+#endif
+
+
+static int tinsert (lua_State *L) {
+ lua_Integer e = aux_getn(L, 1, TAB_RW) + 1; /* first empty element */
+ lua_Integer pos; /* where to insert new element */
+ switch (lua_gettop(L)) {
+ case 2: { /* called with only 2 arguments */
+ pos = e; /* insert new element at the end */
+ break;
+ }
+ case 3: {
+ lua_Integer i;
+ pos = luaL_checkinteger(L, 2); /* 2nd argument is the position */
+ luaL_argcheck(L, 1 <= pos && pos <= e, 2, "position out of bounds");
+ for (i = e; i > pos; i--) { /* move up elements */
+ lua_geti(L, 1, i - 1);
+ lua_seti(L, 1, i); /* t[i] = t[i - 1] */
+ }
+ break;
+ }
+ default: {
+ return luaL_error(L, "wrong number of arguments to 'insert'");
+ }
+ }
+ lua_seti(L, 1, pos); /* t[pos] = v */
+ return 0;
+}
+
+
+static int tremove (lua_State *L) {
+ lua_Integer size = aux_getn(L, 1, TAB_RW);
+ lua_Integer pos = luaL_optinteger(L, 2, size);
+ if (pos != size) /* validate 'pos' if given */
+ luaL_argcheck(L, 1 <= pos && pos <= size + 1, 1, "position out of bounds");
+ lua_geti(L, 1, pos); /* result = t[pos] */
+ for ( ; pos < size; pos++) {
+ lua_geti(L, 1, pos + 1);
+ lua_seti(L, 1, pos); /* t[pos] = t[pos + 1] */
+ }
+ lua_pushnil(L);
+ lua_seti(L, 1, pos); /* t[pos] = nil */
+ return 1;
+}
+
+
+/*
+** Copy elements (1[f], ..., 1[e]) into (tt[t], tt[t+1], ...). Whenever
+** possible, copy in increasing order, which is better for rehashing.
+** "possible" means the destination starts after the original range, or
+** starts at or before its first element, or is a different table.
+*/
+static int tmove (lua_State *L) {
+ lua_Integer f = luaL_checkinteger(L, 2);
+ lua_Integer e = luaL_checkinteger(L, 3);
+ lua_Integer t = luaL_checkinteger(L, 4);
+ int tt = !lua_isnoneornil(L, 5) ? 5 : 1; /* destination table */
+ checktab(L, 1, TAB_R);
+ checktab(L, tt, TAB_W);
+ if (e >= f) { /* otherwise, nothing to move */
+ lua_Integer n, i;
+ luaL_argcheck(L, f > 0 || e < LUA_MAXINTEGER + f, 3,
+ "too many elements to move");
+ n = e - f + 1; /* number of elements to move */
+ luaL_argcheck(L, t <= LUA_MAXINTEGER - n + 1, 4,
+ "destination wrap around");
+ if (t > e || t <= f || (tt != 1 && !lua_compare(L, 1, tt, LUA_OPEQ))) {
+ for (i = 0; i < n; i++) {
+ lua_geti(L, 1, f + i);
+ lua_seti(L, tt, t + i);
+ }
+ }
+ else {
+ for (i = n - 1; i >= 0; i--) {
+ lua_geti(L, 1, f + i);
+ lua_seti(L, tt, t + i);
+ }
+ }
+ }
+ lua_pushvalue(L, tt); /* return destination table */
+ return 1;
+}
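
The same direction rule, shown on a plain C array (0-based indices; names and data are illustrative): when the destination starts inside the source range, the copy must run back to front so unread elements are not overwritten:

#include <stdio.h>

static void move_range(int *a, int f, int e, int t) {
  int n = e - f + 1;                       /* number of elements to move */
  if (t > e || t <= f) {                   /* no harmful overlap: go forward */
    for (int i = 0; i < n; i++) a[t + i] = a[f + i];
  } else {                                 /* destination inside source: go backward */
    for (int i = n - 1; i >= 0; i--) a[t + i] = a[f + i];
  }
}

int main(void) {
  int a[8] = {1, 2, 3, 4, 5, 6, 7, 8};
  move_range(a, 0, 4, 2);                  /* shift a[0..4] up by two slots */
  for (int i = 0; i < 8; i++) printf("%d ", a[i]);
  printf("\n");                            /* prints: 1 2 1 2 3 4 5 8 */
  return 0;
}
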
+
+
+static void addfield (lua_State *L, luaL_Buffer *b, lua_Integer i) {
+ lua_geti(L, 1, i);
+ if (!lua_isstring(L, -1))
+ luaL_error(L, "invalid value (%s) at index %d in table for 'concat'",
+ luaL_typename(L, -1), i);
+ luaL_addvalue(b);
+}
+
+
+static int tconcat (lua_State *L) {
+ luaL_Buffer b;
+ lua_Integer last = aux_getn(L, 1, TAB_R);
+ size_t lsep;
+ const char *sep = luaL_optlstring(L, 2, "", &lsep);
+ lua_Integer i = luaL_optinteger(L, 3, 1);
+ last = luaL_optinteger(L, 4, last);
+ luaL_buffinit(L, &b);
+ for (; i < last; i++) {
+ addfield(L, &b, i);
+ luaL_addlstring(&b, sep, lsep);
+ }
+ if (i == last) /* add last value (if interval was not empty) */
+ addfield(L, &b, i);
+ luaL_pushresult(&b);
+ return 1;
+}
+
+
+/*
+** {======================================================
+** Pack/unpack
+** =======================================================
+*/
+
+static int pack (lua_State *L) {
+ int i;
+ int n = lua_gettop(L); /* number of elements to pack */
+ lua_createtable(L, n, 1); /* create result table */
+ lua_insert(L, 1); /* put it at index 1 */
+ for (i = n; i >= 1; i--) /* assign elements */
+ lua_seti(L, 1, i);
+ lua_pushinteger(L, n);
+ lua_setfield(L, 1, "n"); /* t.n = number of elements */
+ return 1; /* return table */
+}
+
+
+static int unpack (lua_State *L) {
+ lua_Unsigned n;
+ lua_Integer i = luaL_optinteger(L, 2, 1);
+ lua_Integer e = luaL_opt(L, luaL_checkinteger, 3, luaL_len(L, 1));
+ if (i > e) return 0; /* empty range */
+ n = (lua_Unsigned)e - i; /* number of elements minus 1 (avoid overflows) */
+ if (n >= (unsigned int)INT_MAX || !lua_checkstack(L, (int)(++n)))
+ return luaL_error(L, "too many results to unpack");
+ for (; i < e; i++) { /* push arg[i..e - 1] (to avoid overflows) */
+ lua_geti(L, 1, i);
+ }
+ lua_geti(L, 1, e); /* push last element */
+ return (int)n;
+}
+
+/* }====================================================== */
+
+
+
+/*
+** {======================================================
+** Quicksort
+** (based on 'Algorithms in MODULA-3', Robert Sedgewick;
+** Addison-Wesley, 1993.)
+** =======================================================
+*/
+
+
+/* type for array indices */
+typedef unsigned int IdxT;
+
+
+/*
+** Produce a "random" 'unsigned int' to randomize pivot choice. This
+** macro is used only when 'sort' detects a big imbalance in the result
+** of a partition. (If you don't want/need this "randomness", ~0 is a
+** good choice.)
+*/
+#if !defined(l_randomizePivot) /* { */
+
+#include <time.h>
+
+/* size of 'e' measured in number of 'unsigned int's */
+#define sof(e) (sizeof(e) / sizeof(unsigned int))
+
+/*
+** Use 'time' and 'clock' as sources of "randomness". Because we don't
+** know the types 'clock_t' and 'time_t', we cannot cast them to
+** anything without risking overflows. A safe way to use their values
+** is to copy them to an array of a known type and use the array values.
+*/
+static unsigned int l_randomizePivot (void) {
+ clock_t c = clock();
+ time_t t = time(NULL);
+ unsigned int buff[sof(c) + sof(t)];
+ unsigned int i, rnd = 0;
+ memcpy(buff, &c, sof(c) * sizeof(unsigned int));
+ memcpy(buff + sof(c), &t, sof(t) * sizeof(unsigned int));
+ for (i = 0; i < sof(buff); i++)
+ rnd += buff[i];
+ return rnd;
+}
+
+#endif /* } */
+
+
+/* arrays larger than 'RANLIMIT' may use randomized pivots */
+#define RANLIMIT 100u
+
+
+static void set2 (lua_State *L, IdxT i, IdxT j) {
+ lua_seti(L, 1, i);
+ lua_seti(L, 1, j);
+}
+
+
+/*
+** Return true iff value at stack index 'a' is less than the value at
+** index 'b' (according to the order of the sort).
+*/
+static int sort_comp (lua_State *L, int a, int b) {
+ if (lua_isnil(L, 2)) /* no function? */
+ return lua_compare(L, a, b, LUA_OPLT); /* a < b */
+ else { /* function */
+ int res;
+ lua_pushvalue(L, 2); /* push function */
+ lua_pushvalue(L, a-1); /* -1 to compensate function */
+ lua_pushvalue(L, b-2); /* -2 to compensate function and 'a' */
+ lua_call(L, 2, 1); /* call function */
+ res = lua_toboolean(L, -1); /* get result */
+ lua_pop(L, 1); /* pop result */
+ return res;
+ }
+}
+
+
+/*
+** Does the partition: Pivot P is at the top of the stack.
+** precondition: a[lo] <= P == a[up-1] <= a[up],
+** so it only needs to do the partition from lo + 1 to up - 2.
+** Post-condition: a[lo .. i - 1] <= a[i] == P <= a[i + 1 .. up]
+** returns 'i'.
+*/
+static IdxT partition (lua_State *L, IdxT lo, IdxT up) {
+ IdxT i = lo; /* will be incremented before first use */
+ IdxT j = up - 1; /* will be decremented before first use */
+ /* loop invariant: a[lo .. i] <= P <= a[j .. up] */
+ for (;;) {
+ /* next loop: repeat ++i while a[i] < P */
+ while (lua_geti(L, 1, ++i), sort_comp(L, -1, -2)) {
+ if (i == up - 1) /* a[i] < P but a[up - 1] == P ?? */
+ luaL_error(L, "invalid order function for sorting");
+ lua_pop(L, 1); /* remove a[i] */
+ }
+ /* after the loop, a[i] >= P and a[lo .. i - 1] < P */
+ /* next loop: repeat --j while P < a[j] */
+ while (lua_geti(L, 1, --j), sort_comp(L, -3, -1)) {
+ if (j < i) /* j < i but a[j] > P ?? */
+ luaL_error(L, "invalid order function for sorting");
+ lua_pop(L, 1); /* remove a[j] */
+ }
+ /* after the loop, a[j] <= P and a[j + 1 .. up] >= P */
+ if (j < i) { /* no elements out of place? */
+ /* a[lo .. i - 1] <= P <= a[j + 1 .. i .. up] */
+ lua_pop(L, 1); /* pop a[j] */
+      /* swap pivot (a[up - 1]) with a[i] to satisfy the post-condition */
+ set2(L, up - 1, i);
+ return i;
+ }
+ /* otherwise, swap a[i] - a[j] to restore invariant and repeat */
+ set2(L, i, j);
+ }
+}
+
+
+/*
+** Choose an element in the middle (2nd-3rd quarters) of [lo,up]
+** "randomized" by 'rnd'
+*/
+static IdxT choosePivot (IdxT lo, IdxT up, unsigned int rnd) {
+ IdxT r4 = (up - lo) / 4; /* range/4 */
+ IdxT p = rnd % (r4 * 2) + (lo + r4);
+ lua_assert(lo + r4 <= p && p <= up - r4);
+ return p;
+}
+
+
+/*
+** QuickSort algorithm (recursive function)
+*/
+static void auxsort (lua_State *L, IdxT lo, IdxT up,
+ unsigned int rnd) {
+ while (lo < up) { /* loop for tail recursion */
+ IdxT p; /* Pivot index */
+ IdxT n; /* to be used later */
+ /* sort elements 'lo', 'p', and 'up' */
+ lua_geti(L, 1, lo);
+ lua_geti(L, 1, up);
+ if (sort_comp(L, -1, -2)) /* a[up] < a[lo]? */
+ set2(L, lo, up); /* swap a[lo] - a[up] */
+ else
+ lua_pop(L, 2); /* remove both values */
+ if (up - lo == 1) /* only 2 elements? */
+ return; /* already sorted */
+ if (up - lo < RANLIMIT || rnd == 0) /* small interval or no randomize? */
+ p = (lo + up)/2; /* middle element is a good pivot */
+ else /* for larger intervals, it is worth a random pivot */
+ p = choosePivot(lo, up, rnd);
+ lua_geti(L, 1, p);
+ lua_geti(L, 1, lo);
+ if (sort_comp(L, -2, -1)) /* a[p] < a[lo]? */
+ set2(L, p, lo); /* swap a[p] - a[lo] */
+ else {
+ lua_pop(L, 1); /* remove a[lo] */
+ lua_geti(L, 1, up);
+ if (sort_comp(L, -1, -2)) /* a[up] < a[p]? */
+ set2(L, p, up); /* swap a[up] - a[p] */
+ else
+ lua_pop(L, 2);
+ }
+ if (up - lo == 2) /* only 3 elements? */
+ return; /* already sorted */
+ lua_geti(L, 1, p); /* get middle element (Pivot) */
+ lua_pushvalue(L, -1); /* push Pivot */
+ lua_geti(L, 1, up - 1); /* push a[up - 1] */
+ set2(L, p, up - 1); /* swap Pivot (a[p]) with a[up - 1] */
+ p = partition(L, lo, up);
+ /* a[lo .. p - 1] <= a[p] == P <= a[p + 1 .. up] */
+ if (p - lo < up - p) { /* lower interval is smaller? */
+ auxsort(L, lo, p - 1, rnd); /* call recursively for lower interval */
+ n = p - lo; /* size of smaller interval */
+ lo = p + 1; /* tail call for [p + 1 .. up] (upper interval) */
+ }
+ else {
+ auxsort(L, p + 1, up, rnd); /* call recursively for upper interval */
+ n = up - p; /* size of smaller interval */
+ up = p - 1; /* tail call for [lo .. p - 1] (lower interval) */
+ }
+ if ((up - lo) / 128 > n) /* partition too imbalanced? */
+ rnd = l_randomizePivot(); /* try a new randomization */
+ } /* tail call auxsort(L, lo, up, rnd) */
+}
+
+
+static int sort (lua_State *L) {
+ lua_Integer n = aux_getn(L, 1, TAB_RW);
+ if (n > 1) { /* non-trivial interval? */
+ luaL_argcheck(L, n < INT_MAX, 1, "array too big");
+ if (!lua_isnoneornil(L, 2)) /* is there a 2nd argument? */
+ luaL_checktype(L, 2, LUA_TFUNCTION); /* must be a function */
+ lua_settop(L, 2); /* make sure there are two arguments */
+ auxsort(L, 1, (IdxT)n, 0);
+ }
+ return 0;
+}
+
+/* }====================================================== */
+
+
+static const luaL_Reg tab_funcs[] = {
+ {"concat", tconcat},
+#if defined(LUA_COMPAT_MAXN)
+ {"maxn", maxn},
+#endif
+ {"insert", tinsert},
+ {"pack", pack},
+ {"unpack", unpack},
+ {"remove", tremove},
+ {"move", tmove},
+ {"sort", sort},
+ {NULL, NULL}
+};
+
+
+LUAMOD_API int luaopen_table (lua_State *L) {
+ luaL_newlib(L, tab_funcs);
+#if defined(LUA_COMPAT_UNPACK)
+ /* _G.unpack = table.unpack */
+ lua_getfield(L, -1, "unpack");
+ lua_setglobal(L, "unpack");
+#endif
+ return 1;
+}
+
diff --git a/lua/src/ltm.c b/lua/src/ltm.c
new file mode 100644
index 000000000..14e525788
--- /dev/null
+++ b/lua/src/ltm.c
@@ -0,0 +1,165 @@
+/*
+** $Id: ltm.c,v 2.38 2016/12/22 13:08:50 roberto Exp $
+** Tag methods
+** See Copyright Notice in lua.h
+*/
+
+#define ltm_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <string.h>
+
+#include "lua.h"
+
+#include "ldebug.h"
+#include "ldo.h"
+#include "lobject.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "ltm.h"
+#include "lvm.h"
+
+
+static const char udatatypename[] = "userdata";
+
+LUAI_DDEF const char *const luaT_typenames_[LUA_TOTALTAGS] = {
+ "no value",
+ "nil", "boolean", udatatypename, "number",
+ "string", "table", "function", udatatypename, "thread",
+ "proto" /* this last case is used for tests only */
+};
+
+
+void luaT_init (lua_State *L) {
+ static const char *const luaT_eventname[] = { /* ORDER TM */
+ "__index", "__newindex",
+ "__gc", "__mode", "__len", "__eq",
+ "__add", "__sub", "__mul", "__mod", "__pow",
+ "__div", "__idiv",
+ "__band", "__bor", "__bxor", "__shl", "__shr",
+ "__unm", "__bnot", "__lt", "__le",
+ "__concat", "__call"
+ };
+ int i;
+ for (i=0; i<TM_N; i++) {
+ G(L)->tmname[i] = luaS_new(L, luaT_eventname[i]);
+ luaC_fix(L, obj2gco(G(L)->tmname[i])); /* never collect these names */
+ }
+}
+
+
+/*
+** function to be used with macro "fasttm": optimized for absence of
+** tag methods
+*/
+const TValue *luaT_gettm (Table *events, TMS event, TString *ename) {
+ const TValue *tm = luaH_getshortstr(events, ename);
+ lua_assert(event <= TM_EQ);
+ if (ttisnil(tm)) { /* no tag method? */
+ events->flags |= cast_byte(1u<<event); /* cache this fact */
+ return NULL;
+ }
+ else return tm;
+}
+
+
+const TValue *luaT_gettmbyobj (lua_State *L, const TValue *o, TMS event) {
+ Table *mt;
+ switch (ttnov(o)) {
+ case LUA_TTABLE:
+ mt = hvalue(o)->metatable;
+ break;
+ case LUA_TUSERDATA:
+ mt = uvalue(o)->metatable;
+ break;
+ default:
+ mt = G(L)->mt[ttnov(o)];
+ }
+ return (mt ? luaH_getshortstr(mt, G(L)->tmname[event]) : luaO_nilobject);
+}
+
+
+/*
+** Return the name of the type of an object. For tables and userdata
+** with metatable, use their '__name' metafield, if present.
+*/
+const char *luaT_objtypename (lua_State *L, const TValue *o) {
+ Table *mt;
+ if ((ttistable(o) && (mt = hvalue(o)->metatable) != NULL) ||
+ (ttisfulluserdata(o) && (mt = uvalue(o)->metatable) != NULL)) {
+ const TValue *name = luaH_getshortstr(mt, luaS_new(L, "__name"));
+ if (ttisstring(name)) /* is '__name' a string? */
+ return getstr(tsvalue(name)); /* use it as type name */
+ }
+ return ttypename(ttnov(o)); /* else use standard type name */
+}
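
A host-side sketch of the '__name' convention described above (assuming a program linked against this Lua source; luaL_newmetatable stores __name = tname, and luaL_tolstring falls back to it when no __tostring is present):

#include <stdio.h>
#include "lua.h"
#include "lauxlib.h"

int main(void) {
  lua_State *L = luaL_newstate();
  luaL_newmetatable(L, "Point");           /* index 1: metatable with __name = "Point" */
  lua_newuserdata(L, 2 * sizeof(double));  /* index 2: some full userdata */
  lua_pushvalue(L, 1);
  lua_setmetatable(L, 2);                  /* attach the metatable to the userdata */
  printf("%s\n", luaL_tolstring(L, 2, NULL));  /* e.g. "Point: 0x55..." */
  lua_close(L);
  return 0;
}
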
+
+
+void luaT_callTM (lua_State *L, const TValue *f, const TValue *p1,
+ const TValue *p2, TValue *p3, int hasres) {
+ ptrdiff_t result = savestack(L, p3);
+ StkId func = L->top;
+ setobj2s(L, func, f); /* push function (assume EXTRA_STACK) */
+ setobj2s(L, func + 1, p1); /* 1st argument */
+ setobj2s(L, func + 2, p2); /* 2nd argument */
+ L->top += 3;
+ if (!hasres) /* no result? 'p3' is third argument */
+ setobj2s(L, L->top++, p3); /* 3rd argument */
+ /* metamethod may yield only when called from Lua code */
+ if (isLua(L->ci))
+ luaD_call(L, func, hasres);
+ else
+ luaD_callnoyield(L, func, hasres);
+ if (hasres) { /* if has result, move it to its place */
+ p3 = restorestack(L, result);
+ setobjs2s(L, p3, --L->top);
+ }
+}
+
+
+int luaT_callbinTM (lua_State *L, const TValue *p1, const TValue *p2,
+ StkId res, TMS event) {
+ const TValue *tm = luaT_gettmbyobj(L, p1, event); /* try first operand */
+ if (ttisnil(tm))
+ tm = luaT_gettmbyobj(L, p2, event); /* try second operand */
+ if (ttisnil(tm)) return 0;
+ luaT_callTM(L, tm, p1, p2, res, 1);
+ return 1;
+}
+
+
+void luaT_trybinTM (lua_State *L, const TValue *p1, const TValue *p2,
+ StkId res, TMS event) {
+ if (!luaT_callbinTM(L, p1, p2, res, event)) {
+ switch (event) {
+ case TM_CONCAT:
+ luaG_concaterror(L, p1, p2);
+ /* call never returns, but to avoid warnings: *//* FALLTHROUGH */
+ case TM_BAND: case TM_BOR: case TM_BXOR:
+ case TM_SHL: case TM_SHR: case TM_BNOT: {
+ lua_Number dummy;
+ if (tonumber(p1, &dummy) && tonumber(p2, &dummy))
+ luaG_tointerror(L, p1, p2);
+ else
+ luaG_opinterror(L, p1, p2, "perform bitwise operation on");
+ }
+ /* calls never return, but to avoid warnings: *//* FALLTHROUGH */
+ default:
+ luaG_opinterror(L, p1, p2, "perform arithmetic on");
+ }
+ }
+}
+
+
+int luaT_callorderTM (lua_State *L, const TValue *p1, const TValue *p2,
+ TMS event) {
+ if (!luaT_callbinTM(L, p1, p2, L->top, event))
+ return -1; /* no metamethod */
+ else
+ return !l_isfalse(L->top);
+}
+
diff --git a/lua/src/ltm.h b/lua/src/ltm.h
new file mode 100644
index 000000000..63db7269b
--- /dev/null
+++ b/lua/src/ltm.h
@@ -0,0 +1,76 @@
+/*
+** $Id: ltm.h,v 2.22 2016/02/26 19:20:15 roberto Exp $
+** Tag methods
+** See Copyright Notice in lua.h
+*/
+
+#ifndef ltm_h
+#define ltm_h
+
+
+#include "lobject.h"
+
+
+/*
+* WARNING: if you change the order of this enumeration,
+* grep "ORDER TM" and "ORDER OP"
+*/
+typedef enum {
+ TM_INDEX,
+ TM_NEWINDEX,
+ TM_GC,
+ TM_MODE,
+ TM_LEN,
+ TM_EQ, /* last tag method with fast access */
+ TM_ADD,
+ TM_SUB,
+ TM_MUL,
+ TM_MOD,
+ TM_POW,
+ TM_DIV,
+ TM_IDIV,
+ TM_BAND,
+ TM_BOR,
+ TM_BXOR,
+ TM_SHL,
+ TM_SHR,
+ TM_UNM,
+ TM_BNOT,
+ TM_LT,
+ TM_LE,
+ TM_CONCAT,
+ TM_CALL,
+ TM_N /* number of elements in the enum */
+} TMS;
+
+
+
+#define gfasttm(g,et,e) ((et) == NULL ? NULL : \
+ ((et)->flags & (1u<<(e))) ? NULL : luaT_gettm(et, e, (g)->tmname[e]))
+
+#define fasttm(l,et,e) gfasttm(G(l), et, e)
+
+#define ttypename(x) luaT_typenames_[(x) + 1]
+
+LUAI_DDEC const char *const luaT_typenames_[LUA_TOTALTAGS];
+
+
+LUAI_FUNC const char *luaT_objtypename (lua_State *L, const TValue *o);
+
+LUAI_FUNC const TValue *luaT_gettm (Table *events, TMS event, TString *ename);
+LUAI_FUNC const TValue *luaT_gettmbyobj (lua_State *L, const TValue *o,
+ TMS event);
+LUAI_FUNC void luaT_init (lua_State *L);
+
+LUAI_FUNC void luaT_callTM (lua_State *L, const TValue *f, const TValue *p1,
+ const TValue *p2, TValue *p3, int hasres);
+LUAI_FUNC int luaT_callbinTM (lua_State *L, const TValue *p1, const TValue *p2,
+ StkId res, TMS event);
+LUAI_FUNC void luaT_trybinTM (lua_State *L, const TValue *p1, const TValue *p2,
+ StkId res, TMS event);
+LUAI_FUNC int luaT_callorderTM (lua_State *L, const TValue *p1,
+ const TValue *p2, TMS event);
+
+
+
+#endif
diff --git a/lua/src/lua.h b/lua/src/lua.h
new file mode 100644
index 000000000..26c0e2d69
--- /dev/null
+++ b/lua/src/lua.h
@@ -0,0 +1,486 @@
+/*
+** $Id: lua.h,v 1.332 2016/12/22 15:51:20 roberto Exp $
+** Lua - A Scripting Language
+** Lua.org, PUC-Rio, Brazil (http://www.lua.org)
+** See Copyright Notice at the end of this file
+*/
+
+
+#ifndef lua_h
+#define lua_h
+
+#include <stdarg.h>
+#include <stddef.h>
+
+
+#include "luaconf.h"
+
+
+#define LUA_VERSION_MAJOR "5"
+#define LUA_VERSION_MINOR "3"
+#define LUA_VERSION_NUM 503
+#define LUA_VERSION_RELEASE "4"
+
+#define LUA_VERSION "Lua " LUA_VERSION_MAJOR "." LUA_VERSION_MINOR
+#define LUA_RELEASE LUA_VERSION "." LUA_VERSION_RELEASE
+#define LUA_COPYRIGHT LUA_RELEASE " Copyright (C) 1994-2017 Lua.org, PUC-Rio"
+#define LUA_AUTHORS "R. Ierusalimschy, L. H. de Figueiredo, W. Celes"
+
+
+/* mark for precompiled code ('<esc>Lua') */
+#define LUA_SIGNATURE "\x1bLua"
+
+/* option for multiple returns in 'lua_pcall' and 'lua_call' */
+#define LUA_MULTRET (-1)
+
+
+/*
+** Pseudo-indices
+** (-LUAI_MAXSTACK is the minimum valid index; we keep some free empty
+** space after that to help overflow detection)
+*/
+#define LUA_REGISTRYINDEX (-LUAI_MAXSTACK - 1000)
+#define lua_upvalueindex(i) (LUA_REGISTRYINDEX - (i))
+
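
A sketch of the classic use of lua_upvalueindex: a C closure keeping a counter in its first upvalue (hypothetical host program and function names, assuming it links against this Lua):

#include <stdio.h>
#include "lua.h"
#include "lauxlib.h"

static int counter(lua_State *L) {
  lua_Integer n = lua_tointeger(L, lua_upvalueindex(1)) + 1;
  lua_pushinteger(L, n);                   /* new count, to be returned */
  lua_pushvalue(L, -1);
  lua_replace(L, lua_upvalueindex(1));     /* also store it back in the upvalue */
  return 1;
}

int main(void) {
  lua_State *L = luaL_newstate();
  lua_pushinteger(L, 0);                   /* initial count becomes upvalue 1 */
  lua_pushcclosure(L, counter, 1);
  for (int i = 0; i < 3; i++) {
    lua_pushvalue(L, -1);                  /* copy of the closure to call */
    lua_call(L, 0, 1);
    printf("%lld\n", (long long)lua_tointeger(L, -1));  /* 1, 2, 3 */
    lua_pop(L, 1);
  }
  lua_close(L);
  return 0;
}
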
+
+/* thread status */
+#define LUA_OK 0
+#define LUA_YIELD 1
+#define LUA_ERRRUN 2
+#define LUA_ERRSYNTAX 3
+#define LUA_ERRMEM 4
+#define LUA_ERRGCMM 5
+#define LUA_ERRERR 6
+
+
+typedef struct lua_State lua_State;
+
+
+/*
+** basic types
+*/
+#define LUA_TNONE (-1)
+
+#define LUA_TNIL 0
+#define LUA_TBOOLEAN 1
+#define LUA_TLIGHTUSERDATA 2
+#define LUA_TNUMBER 3
+#define LUA_TSTRING 4
+#define LUA_TTABLE 5
+#define LUA_TFUNCTION 6
+#define LUA_TUSERDATA 7
+#define LUA_TTHREAD 8
+
+#define LUA_NUMTAGS 9
+
+
+
+/* minimum Lua stack available to a C function */
+#define LUA_MINSTACK 20
+
+
+/* predefined values in the registry */
+#define LUA_RIDX_MAINTHREAD 1
+#define LUA_RIDX_GLOBALS 2
+#define LUA_RIDX_LAST LUA_RIDX_GLOBALS
+
+
+/* type of numbers in Lua */
+typedef LUA_NUMBER lua_Number;
+
+
+/* type for integer functions */
+typedef LUA_INTEGER lua_Integer;
+
+/* unsigned integer type */
+typedef LUA_UNSIGNED lua_Unsigned;
+
+/* type for continuation-function contexts */
+typedef LUA_KCONTEXT lua_KContext;
+
+
+/*
+** Type for C functions registered with Lua
+*/
+typedef int (*lua_CFunction) (lua_State *L);
+
+/*
+** Type for continuation functions
+*/
+typedef int (*lua_KFunction) (lua_State *L, int status, lua_KContext ctx);
+
+
+/*
+** Type for functions that read/write blocks when loading/dumping Lua chunks
+*/
+typedef const char * (*lua_Reader) (lua_State *L, void *ud, size_t *sz);
+
+typedef int (*lua_Writer) (lua_State *L, const void *p, size_t sz, void *ud);
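
A sketch of a one-shot lua_Reader over a C string, fed to lua_load (assuming a host linked against this source; StrSource and str_reader are illustrative names):

#include <stdio.h>
#include <string.h>
#include "lua.h"
#include "lauxlib.h"

typedef struct { const char *s; int done; } StrSource;

static const char *str_reader(lua_State *L, void *ud, size_t *size) {
  StrSource *src = (StrSource *)ud;
  (void)L;
  if (src->done) { *size = 0; return NULL; }  /* no more input */
  src->done = 1;
  *size = strlen(src->s);
  return src->s;                              /* hand the whole chunk at once */
}

int main(void) {
  lua_State *L = luaL_newstate();
  StrSource src = { "return 6 * 7", 0 };
  if (lua_load(L, str_reader, &src, "chunk", "t") == LUA_OK) {
    lua_call(L, 0, 1);
    printf("%lld\n", (long long)lua_tointeger(L, -1));  /* prints: 42 */
  }
  lua_close(L);
  return 0;
}
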
+
+
+/*
+** Type for memory-allocation functions
+*/
+typedef void * (*lua_Alloc) (void *ud, void *ptr, size_t osize, size_t nsize);
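
The contract above is commonly met with a realloc/free-based allocator; a minimal sketch, essentially the allocator from the reference manual, passed to lua_newstate:

#include <stdio.h>
#include <stdlib.h>
#include "lua.h"

static void *l_alloc(void *ud, void *ptr, size_t osize, size_t nsize) {
  (void)ud; (void)osize;            /* unused here */
  if (nsize == 0) {                 /* a free request */
    free(ptr);
    return NULL;
  }
  else                              /* allocate or resize */
    return realloc(ptr, nsize);
}

int main(void) {
  lua_State *L = lua_newstate(l_alloc, NULL);
  printf("state %s\n", L != NULL ? "created" : "not created");
  if (L != NULL) lua_close(L);
  return 0;
}
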
+
+
+
+/*
+** generic extra include file
+*/
+#if defined(LUA_USER_H)
+#include LUA_USER_H
+#endif
+
+
+/*
+** RCS ident string
+*/
+extern const char lua_ident[];
+
+
+/*
+** state manipulation
+*/
+LUA_API lua_State *(lua_newstate) (lua_Alloc f, void *ud);
+LUA_API void (lua_close) (lua_State *L);
+LUA_API lua_State *(lua_newthread) (lua_State *L);
+
+LUA_API lua_CFunction (lua_atpanic) (lua_State *L, lua_CFunction panicf);
+
+
+LUA_API const lua_Number *(lua_version) (lua_State *L);
+
+
+/*
+** basic stack manipulation
+*/
+LUA_API int (lua_absindex) (lua_State *L, int idx);
+LUA_API int (lua_gettop) (lua_State *L);
+LUA_API void (lua_settop) (lua_State *L, int idx);
+LUA_API void (lua_pushvalue) (lua_State *L, int idx);
+LUA_API void (lua_rotate) (lua_State *L, int idx, int n);
+LUA_API void (lua_copy) (lua_State *L, int fromidx, int toidx);
+LUA_API int (lua_checkstack) (lua_State *L, int n);
+
+LUA_API void (lua_xmove) (lua_State *from, lua_State *to, int n);
+
+
+/*
+** access functions (stack -> C)
+*/
+
+LUA_API int (lua_isnumber) (lua_State *L, int idx);
+LUA_API int (lua_isstring) (lua_State *L, int idx);
+LUA_API int (lua_iscfunction) (lua_State *L, int idx);
+LUA_API int (lua_isinteger) (lua_State *L, int idx);
+LUA_API int (lua_isuserdata) (lua_State *L, int idx);
+LUA_API int (lua_type) (lua_State *L, int idx);
+LUA_API const char *(lua_typename) (lua_State *L, int tp);
+
+LUA_API lua_Number (lua_tonumberx) (lua_State *L, int idx, int *isnum);
+LUA_API lua_Integer (lua_tointegerx) (lua_State *L, int idx, int *isnum);
+LUA_API int (lua_toboolean) (lua_State *L, int idx);
+LUA_API const char *(lua_tolstring) (lua_State *L, int idx, size_t *len);
+LUA_API size_t (lua_rawlen) (lua_State *L, int idx);
+LUA_API lua_CFunction (lua_tocfunction) (lua_State *L, int idx);
+LUA_API void *(lua_touserdata) (lua_State *L, int idx);
+LUA_API lua_State *(lua_tothread) (lua_State *L, int idx);
+LUA_API const void *(lua_topointer) (lua_State *L, int idx);
+
+
+/*
+** Comparison and arithmetic functions
+*/
+
+#define LUA_OPADD 0 /* ORDER TM, ORDER OP */
+#define LUA_OPSUB 1
+#define LUA_OPMUL 2
+#define LUA_OPMOD 3
+#define LUA_OPPOW 4
+#define LUA_OPDIV 5
+#define LUA_OPIDIV 6
+#define LUA_OPBAND 7
+#define LUA_OPBOR 8
+#define LUA_OPBXOR 9
+#define LUA_OPSHL 10
+#define LUA_OPSHR 11
+#define LUA_OPUNM 12
+#define LUA_OPBNOT 13
+
+LUA_API void (lua_arith) (lua_State *L, int op);
+
+#define LUA_OPEQ 0
+#define LUA_OPLT 1
+#define LUA_OPLE 2
+
+LUA_API int (lua_rawequal) (lua_State *L, int idx1, int idx2);
+LUA_API int (lua_compare) (lua_State *L, int idx1, int idx2, int op);
+
+
+/*
+** push functions (C -> stack)
+*/
+LUA_API void (lua_pushnil) (lua_State *L);
+LUA_API void (lua_pushnumber) (lua_State *L, lua_Number n);
+LUA_API void (lua_pushinteger) (lua_State *L, lua_Integer n);
+LUA_API const char *(lua_pushlstring) (lua_State *L, const char *s, size_t len);
+LUA_API const char *(lua_pushstring) (lua_State *L, const char *s);
+LUA_API const char *(lua_pushvfstring) (lua_State *L, const char *fmt,
+ va_list argp);
+LUA_API const char *(lua_pushfstring) (lua_State *L, const char *fmt, ...);
+LUA_API void (lua_pushcclosure) (lua_State *L, lua_CFunction fn, int n);
+LUA_API void (lua_pushboolean) (lua_State *L, int b);
+LUA_API void (lua_pushlightuserdata) (lua_State *L, void *p);
+LUA_API int (lua_pushthread) (lua_State *L);
+
+
+/*
+** get functions (Lua -> stack)
+*/
+LUA_API int (lua_getglobal) (lua_State *L, const char *name);
+LUA_API int (lua_gettable) (lua_State *L, int idx);
+LUA_API int (lua_getfield) (lua_State *L, int idx, const char *k);
+LUA_API int (lua_geti) (lua_State *L, int idx, lua_Integer n);
+LUA_API int (lua_rawget) (lua_State *L, int idx);
+LUA_API int (lua_rawgeti) (lua_State *L, int idx, lua_Integer n);
+LUA_API int (lua_rawgetp) (lua_State *L, int idx, const void *p);
+
+LUA_API void (lua_createtable) (lua_State *L, int narr, int nrec);
+LUA_API void *(lua_newuserdata) (lua_State *L, size_t sz);
+LUA_API int (lua_getmetatable) (lua_State *L, int objindex);
+LUA_API int (lua_getuservalue) (lua_State *L, int idx);
+
+
+/*
+** set functions (stack -> Lua)
+*/
+LUA_API void (lua_setglobal) (lua_State *L, const char *name);
+LUA_API void (lua_settable) (lua_State *L, int idx);
+LUA_API void (lua_setfield) (lua_State *L, int idx, const char *k);
+LUA_API void (lua_seti) (lua_State *L, int idx, lua_Integer n);
+LUA_API void (lua_rawset) (lua_State *L, int idx);
+LUA_API void (lua_rawseti) (lua_State *L, int idx, lua_Integer n);
+LUA_API void (lua_rawsetp) (lua_State *L, int idx, const void *p);
+LUA_API int (lua_setmetatable) (lua_State *L, int objindex);
+LUA_API void (lua_setuservalue) (lua_State *L, int idx);
+
+
+/*
+** 'load' and 'call' functions (load and run Lua code)
+*/
+LUA_API void (lua_callk) (lua_State *L, int nargs, int nresults,
+ lua_KContext ctx, lua_KFunction k);
+#define lua_call(L,n,r) lua_callk(L, (n), (r), 0, NULL)
+
+LUA_API int (lua_pcallk) (lua_State *L, int nargs, int nresults, int errfunc,
+ lua_KContext ctx, lua_KFunction k);
+#define lua_pcall(L,n,r,f) lua_pcallk(L, (n), (r), (f), 0, NULL)
+
+LUA_API int (lua_load) (lua_State *L, lua_Reader reader, void *dt,
+ const char *chunkname, const char *mode);
+
+LUA_API int (lua_dump) (lua_State *L, lua_Writer writer, void *data, int strip);
+
+
+/*
+** coroutine functions
+*/
+LUA_API int (lua_yieldk) (lua_State *L, int nresults, lua_KContext ctx,
+ lua_KFunction k);
+LUA_API int (lua_resume) (lua_State *L, lua_State *from, int narg);
+LUA_API int (lua_status) (lua_State *L);
+LUA_API int (lua_isyieldable) (lua_State *L);
+
+#define lua_yield(L,n) lua_yieldk(L, (n), 0, NULL)
+
+
+/*
+** garbage-collection function and options
+*/
+
+#define LUA_GCSTOP 0
+#define LUA_GCRESTART 1
+#define LUA_GCCOLLECT 2
+#define LUA_GCCOUNT 3
+#define LUA_GCCOUNTB 4
+#define LUA_GCSTEP 5
+#define LUA_GCSETPAUSE 6
+#define LUA_GCSETSTEPMUL 7
+#define LUA_GCISRUNNING 9
+
+LUA_API int (lua_gc) (lua_State *L, int what, int data);
+
+
+/*
+** miscellaneous functions
+*/
+
+LUA_API int (lua_error) (lua_State *L);
+
+LUA_API int (lua_next) (lua_State *L, int idx);
+
+LUA_API void (lua_concat) (lua_State *L, int n);
+LUA_API void (lua_len) (lua_State *L, int idx);
+
+LUA_API size_t (lua_stringtonumber) (lua_State *L, const char *s);
+
+LUA_API lua_Alloc (lua_getallocf) (lua_State *L, void **ud);
+LUA_API void (lua_setallocf) (lua_State *L, lua_Alloc f, void *ud);
+
+
+
+/*
+** {==============================================================
+** some useful macros
+** ===============================================================
+*/
+
+#define lua_getextraspace(L) ((void *)((char *)(L) - LUA_EXTRASPACE))
+
+#define lua_tonumber(L,i) lua_tonumberx(L,(i),NULL)
+#define lua_tointeger(L,i) lua_tointegerx(L,(i),NULL)
+
+#define lua_pop(L,n) lua_settop(L, -(n)-1)
+
+#define lua_newtable(L) lua_createtable(L, 0, 0)
+
+#define lua_register(L,n,f) (lua_pushcfunction(L, (f)), lua_setglobal(L, (n)))
+
+#define lua_pushcfunction(L,f) lua_pushcclosure(L, (f), 0)
+
+#define lua_isfunction(L,n) (lua_type(L, (n)) == LUA_TFUNCTION)
+#define lua_istable(L,n) (lua_type(L, (n)) == LUA_TTABLE)
+#define lua_islightuserdata(L,n) (lua_type(L, (n)) == LUA_TLIGHTUSERDATA)
+#define lua_isnil(L,n) (lua_type(L, (n)) == LUA_TNIL)
+#define lua_isboolean(L,n) (lua_type(L, (n)) == LUA_TBOOLEAN)
+#define lua_isthread(L,n) (lua_type(L, (n)) == LUA_TTHREAD)
+#define lua_isnone(L,n) (lua_type(L, (n)) == LUA_TNONE)
+#define lua_isnoneornil(L, n) (lua_type(L, (n)) <= 0)
+
+#define lua_pushliteral(L, s) lua_pushstring(L, "" s)
+
+#define lua_pushglobaltable(L) \
+ ((void)lua_rawgeti(L, LUA_REGISTRYINDEX, LUA_RIDX_GLOBALS))
+
+#define lua_tostring(L,i) lua_tolstring(L, (i), NULL)
+
+
+#define lua_insert(L,idx) lua_rotate(L, (idx), 1)
+
+#define lua_remove(L,idx) (lua_rotate(L, (idx), -1), lua_pop(L, 1))
+
+#define lua_replace(L,idx) (lua_copy(L, -1, (idx)), lua_pop(L, 1))
+
+/* }============================================================== */
+
+
+/*
+** {==============================================================
+** compatibility macros for unsigned conversions
+** ===============================================================
+*/
+#if defined(LUA_COMPAT_APIINTCASTS)
+
+#define lua_pushunsigned(L,n) lua_pushinteger(L, (lua_Integer)(n))
+#define lua_tounsignedx(L,i,is) ((lua_Unsigned)lua_tointegerx(L,i,is))
+#define lua_tounsigned(L,i) lua_tounsignedx(L,(i),NULL)
+
+#endif
+/* }============================================================== */
+
+/*
+** {======================================================================
+** Debug API
+** =======================================================================
+*/
+
+
+/*
+** Event codes
+*/
+#define LUA_HOOKCALL 0
+#define LUA_HOOKRET 1
+#define LUA_HOOKLINE 2
+#define LUA_HOOKCOUNT 3
+#define LUA_HOOKTAILCALL 4
+
+
+/*
+** Event masks
+*/
+#define LUA_MASKCALL (1 << LUA_HOOKCALL)
+#define LUA_MASKRET (1 << LUA_HOOKRET)
+#define LUA_MASKLINE (1 << LUA_HOOKLINE)
+#define LUA_MASKCOUNT (1 << LUA_HOOKCOUNT)
+
+typedef struct lua_Debug lua_Debug; /* activation record */
+
+
+/* Functions to be called by the debugger in specific events */
+typedef void (*lua_Hook) (lua_State *L, lua_Debug *ar);
+
+
+LUA_API int (lua_getstack) (lua_State *L, int level, lua_Debug *ar);
+LUA_API int (lua_getinfo) (lua_State *L, const char *what, lua_Debug *ar);
+LUA_API const char *(lua_getlocal) (lua_State *L, const lua_Debug *ar, int n);
+LUA_API const char *(lua_setlocal) (lua_State *L, const lua_Debug *ar, int n);
+LUA_API const char *(lua_getupvalue) (lua_State *L, int funcindex, int n);
+LUA_API const char *(lua_setupvalue) (lua_State *L, int funcindex, int n);
+
+LUA_API void *(lua_upvalueid) (lua_State *L, int fidx, int n);
+LUA_API void (lua_upvaluejoin) (lua_State *L, int fidx1, int n1,
+ int fidx2, int n2);
+
+LUA_API void (lua_sethook) (lua_State *L, lua_Hook func, int mask, int count);
+LUA_API lua_Hook (lua_gethook) (lua_State *L);
+LUA_API int (lua_gethookmask) (lua_State *L);
+LUA_API int (lua_gethookcount) (lua_State *L);
+
+
+struct lua_Debug {
+ int event;
+ const char *name; /* (n) */
+ const char *namewhat; /* (n) 'global', 'local', 'field', 'method' */
+ const char *what; /* (S) 'Lua', 'C', 'main', 'tail' */
+ const char *source; /* (S) */
+ int currentline; /* (l) */
+ int linedefined; /* (S) */
+ int lastlinedefined; /* (S) */
+ unsigned char nups; /* (u) number of upvalues */
+ unsigned char nparams;/* (u) number of parameters */
+ char isvararg; /* (u) */
+ char istailcall; /* (t) */
+ char short_src[LUA_IDSIZE]; /* (S) */
+ /* private part */
+ struct CallInfo *i_ci; /* active function */
+};
+
+/* }====================================================================== */
+
+
+/******************************************************************************
+* Copyright (C) 1994-2017 Lua.org, PUC-Rio.
+*
+* Permission is hereby granted, free of charge, to any person obtaining
+* a copy of this software and associated documentation files (the
+* "Software"), to deal in the Software without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Software, and to
+* permit persons to whom the Software is furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+******************************************************************************/
+
+
+#endif
diff --git a/lua/src/lua.hpp b/lua/src/lua.hpp
new file mode 100644
index 000000000..ec417f594
--- /dev/null
+++ b/lua/src/lua.hpp
@@ -0,0 +1,9 @@
+// lua.hpp
+// Lua header files for C++
+// <<extern "C">> not supplied automatically because Lua also compiles as C++
+
+extern "C" {
+#include "lua.h"
+#include "lualib.h"
+#include "lauxlib.h"
+}
diff --git a/lua/src/luaconf.h b/lua/src/luaconf.h
new file mode 100644
index 000000000..f37bea096
--- /dev/null
+++ b/lua/src/luaconf.h
@@ -0,0 +1,783 @@
+/*
+** $Id: luaconf.h,v 1.259 2016/12/22 13:08:50 roberto Exp $
+** Configuration file for Lua
+** See Copyright Notice in lua.h
+*/
+
+
+#ifndef luaconf_h
+#define luaconf_h
+
+#include <limits.h>
+#include <stddef.h>
+
+
+/*
+** ===================================================================
+** Search for "@@" to find all configurable definitions.
+** ===================================================================
+*/
+
+
+/*
+** {====================================================================
+** System Configuration: macros to adapt (if needed) Lua to some
+** particular platform, for instance compiling it with 32-bit numbers or
+** restricting it to C89.
+** =====================================================================
+*/
+
+/*
+@@ LUA_32BITS enables Lua with 32-bit integers and 32-bit floats. You
+** can also define LUA_32BITS in the make file, but changing it here
+** ensures that all software connected to Lua will be compiled with the
+** same configuration.
+*/
+/* #define LUA_32BITS */
+
+
+/*
+@@ LUA_USE_C89 controls the use of non-ISO-C89 features.
+** Define it if you want Lua to avoid the use of a few C99 features
+** or Windows-specific features on Windows.
+*/
+/* #define LUA_USE_C89 */
+
+
+/*
+** By default, Lua on Windows uses (some) specific Windows features
+*/
+#if !defined(LUA_USE_C89) && defined(_WIN32) && !defined(_WIN32_WCE)
+#define LUA_USE_WINDOWS /* enable goodies for regular Windows */
+#endif
+
+
+#if defined(LUA_USE_WINDOWS)
+#define LUA_DL_DLL /* enable support for DLL */
+#define LUA_USE_C89 /* broadly, Windows is C89 */
+#endif
+
+
+#if defined(LUA_USE_LINUX)
+#define LUA_USE_POSIX
+#define LUA_USE_DLOPEN /* needs an extra library: -ldl */
+#define LUA_USE_READLINE /* needs some extra libraries */
+#endif
+
+
+#if defined(LUA_USE_MACOSX)
+#define LUA_USE_POSIX
+#define LUA_USE_DLOPEN /* MacOS does not need -ldl */
+#define LUA_USE_READLINE /* needs an extra library: -lreadline */
+#endif
+
+
+/*
+@@ LUA_C89_NUMBERS ensures that Lua uses the largest types available for
+** C89 ('long' and 'double'); Windows always has '__int64', so it does
+** not need to use this case.
+*/
+#if defined(LUA_USE_C89) && !defined(LUA_USE_WINDOWS)
+#define LUA_C89_NUMBERS
+#endif
+
+
+
+/*
+@@ LUAI_BITSINT defines the (minimum) number of bits in an 'int'.
+*/
+/* avoid undefined shifts */
+#if ((INT_MAX >> 15) >> 15) >= 1
+#define LUAI_BITSINT 32
+#else
+/* 'int' always must have at least 16 bits */
+#define LUAI_BITSINT 16
+#endif
+
+
+/*
+@@ LUA_INT_TYPE defines the type for Lua integers.
+@@ LUA_FLOAT_TYPE defines the type for Lua floats.
+** Lua should work fine with any mix of these options (if supported
+** by your C compiler). The usual configurations are 64-bit integers
+** and 'double' (the default), 32-bit integers and 'float' (for
+** restricted platforms), and 'long'/'double' (for C compilers not
+** compliant with C99, which may not have support for 'long long').
+*/
+
+/* predefined options for LUA_INT_TYPE */
+#define LUA_INT_INT 1
+#define LUA_INT_LONG 2
+#define LUA_INT_LONGLONG 3
+
+/* predefined options for LUA_FLOAT_TYPE */
+#define LUA_FLOAT_FLOAT 1
+#define LUA_FLOAT_DOUBLE 2
+#define LUA_FLOAT_LONGDOUBLE 3
+
+#if defined(LUA_32BITS) /* { */
+/*
+** 32-bit integers and 'float'
+*/
+#if LUAI_BITSINT >= 32 /* use 'int' if big enough */
+#define LUA_INT_TYPE LUA_INT_INT
+#else /* otherwise use 'long' */
+#define LUA_INT_TYPE LUA_INT_LONG
+#endif
+#define LUA_FLOAT_TYPE LUA_FLOAT_FLOAT
+
+#elif defined(LUA_C89_NUMBERS) /* }{ */
+/*
+** largest types available for C89 ('long' and 'double')
+*/
+#define LUA_INT_TYPE LUA_INT_LONG
+#define LUA_FLOAT_TYPE LUA_FLOAT_DOUBLE
+
+#endif /* } */
+
+
+/*
+** default configuration for 64-bit Lua ('long long' and 'double')
+*/
+#if !defined(LUA_INT_TYPE)
+#define LUA_INT_TYPE LUA_INT_LONGLONG
+#endif
+
+#if !defined(LUA_FLOAT_TYPE)
+#define LUA_FLOAT_TYPE LUA_FLOAT_DOUBLE
+#endif
+
+/* }================================================================== */
+
+
+
+
+/*
+** {==================================================================
+** Configuration for Paths.
+** ===================================================================
+*/
+
+/*
+** LUA_PATH_SEP is the character that separates templates in a path.
+** LUA_PATH_MARK is the string that marks the substitution points in a
+** template.
+** LUA_EXEC_DIR in a Windows path is replaced by the executable's
+** directory.
+*/
+#define LUA_PATH_SEP ";"
+#define LUA_PATH_MARK "?"
+#define LUA_EXEC_DIR "!"
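+
+/*
+** Editorial note (illustrative, not part of the upstream Lua sources):
+** a worked example with the defaults above. For require("foo.bar") the
+** package searcher takes each template in the path (templates are
+** separated by LUA_PATH_SEP), replaces LUA_PATH_MARK with the module
+** name after turning '.' into the directory separator, and tries the
+** resulting file name, e.g. for Lua 5.3 on a Unix-like system:
+**   "/usr/local/share/lua/5.3/?.lua" -> "/usr/local/share/lua/5.3/foo/bar.lua"
+*/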
+
+
+/*
+@@ LUA_PATH_DEFAULT is the default path that Lua uses to look for
+** Lua libraries.
+@@ LUA_CPATH_DEFAULT is the default path that Lua uses to look for
+** C libraries.
+** CHANGE them if your machine has a non-conventional directory
+** hierarchy or if you want to install your libraries in
+** non-conventional directories.
+*/
+#define LUA_VDIR LUA_VERSION_MAJOR "." LUA_VERSION_MINOR
+#if defined(_WIN32) /* { */
+/*
+** In Windows, any exclamation mark ('!') in the path is replaced by the
+** path of the directory of the executable file of the current process.
+*/
+#define LUA_LDIR "!\\lua\\"
+#define LUA_CDIR "!\\"
+#define LUA_SHRDIR "!\\..\\share\\lua\\" LUA_VDIR "\\"
+#define LUA_PATH_DEFAULT \
+ LUA_LDIR"?.lua;" LUA_LDIR"?\\init.lua;" \
+ LUA_CDIR"?.lua;" LUA_CDIR"?\\init.lua;" \
+ LUA_SHRDIR"?.lua;" LUA_SHRDIR"?\\init.lua;" \
+ ".\\?.lua;" ".\\?\\init.lua"
+#define LUA_CPATH_DEFAULT \
+ LUA_CDIR"?.dll;" \
+ LUA_CDIR"..\\lib\\lua\\" LUA_VDIR "\\?.dll;" \
+ LUA_CDIR"loadall.dll;" ".\\?.dll"
+
+#else /* }{ */
+
+#define LUA_ROOT "/usr/local/"
+#define LUA_LDIR LUA_ROOT "share/lua/" LUA_VDIR "/"
+#define LUA_CDIR LUA_ROOT "lib/lua/" LUA_VDIR "/"
+#define LUA_PATH_DEFAULT \
+ LUA_LDIR"?.lua;" LUA_LDIR"?/init.lua;" \
+ LUA_CDIR"?.lua;" LUA_CDIR"?/init.lua;" \
+ "./?.lua;" "./?/init.lua"
+#define LUA_CPATH_DEFAULT \
+ LUA_CDIR"?.so;" LUA_CDIR"loadall.so;" "./?.so"
+#endif /* } */
+
+
+/*
+@@ LUA_DIRSEP is the directory separator (for submodules).
+** CHANGE it if your machine does not use "/" as the directory separator
+** and is not Windows. (On Windows Lua automatically uses "\".)
+*/
+#if defined(_WIN32)
+#define LUA_DIRSEP "\\"
+#else
+#define LUA_DIRSEP "/"
+#endif
+
+/* }================================================================== */
+
+
+/*
+** {==================================================================
+** Marks for exported symbols in the C code
+** ===================================================================
+*/
+
+/*
+@@ LUA_API is a mark for all core API functions.
+@@ LUALIB_API is a mark for all auxiliary library functions.
+@@ LUAMOD_API is a mark for all standard library opening functions.
+** CHANGE them if you need to define those functions in some special way.
+** For instance, if you want to create one Windows DLL with the core and
+** the libraries, you may want to use the following definition (define
+** LUA_BUILD_AS_DLL to get it).
+*/
+#if defined(LUA_BUILD_AS_DLL) /* { */
+
+#if defined(LUA_CORE) || defined(LUA_LIB) /* { */
+#define LUA_API __declspec(dllexport)
+#else /* }{ */
+#define LUA_API __declspec(dllimport)
+#endif /* } */
+
+#else /* }{ */
+
+#define LUA_API extern
+
+#endif /* } */
+
+
+/* more often than not the libs go together with the core */
+#define LUALIB_API LUA_API
+#define LUAMOD_API LUALIB_API
+
+
+/*
+@@ LUAI_FUNC is a mark for all extern functions that are not to be
+** exported to outside modules.
+@@ LUAI_DDEF and LUAI_DDEC are marks for all extern (const) variables
+** that are not to be exported to outside modules (LUAI_DDEF for
+** definitions and LUAI_DDEC for declarations).
+** CHANGE them if you need to mark them in some special way. Elf/gcc
+** (versions 3.2 and later) mark them as "hidden" to optimize access
+** when Lua is compiled as a shared library. Not all elf targets support
+** this attribute. Unfortunately, gcc does not offer a way to check
+** whether the target offers that support, and those without support
+** give a warning about it. To avoid these warnings, change to the
+** default definition.
+*/
+#if defined(__GNUC__) && ((__GNUC__*100 + __GNUC_MINOR__) >= 302) && \
+ defined(__ELF__) /* { */
+#define LUAI_FUNC __attribute__((visibility("hidden"))) extern
+#else /* }{ */
+#define LUAI_FUNC extern
+#endif /* } */
+
+#define LUAI_DDEC LUAI_FUNC
+#define LUAI_DDEF /* empty */
+
+/* }================================================================== */
+
+
+/*
+** {==================================================================
+** Compatibility with previous versions
+** ===================================================================
+*/
+
+/*
+@@ LUA_COMPAT_5_2 controls other macros for compatibility with Lua 5.2.
+@@ LUA_COMPAT_5_1 controls other macros for compatibility with Lua 5.1.
+** You can define it to get all options, or change specific options
+** to fit your specific needs.
+*/
+#if defined(LUA_COMPAT_5_2) /* { */
+
+/*
+@@ LUA_COMPAT_MATHLIB controls the presence of several deprecated
+** functions in the mathematical library.
+*/
+#define LUA_COMPAT_MATHLIB
+
+/*
+@@ LUA_COMPAT_BITLIB controls the presence of library 'bit32'.
+*/
+#define LUA_COMPAT_BITLIB
+
+/*
+@@ LUA_COMPAT_IPAIRS controls the effectiveness of the __ipairs metamethod.
+*/
+#define LUA_COMPAT_IPAIRS
+
+/*
+@@ LUA_COMPAT_APIINTCASTS controls the presence of macros for
+** manipulating other integer types (lua_pushunsigned, lua_tounsigned,
+** luaL_checkint, luaL_checklong, etc.)
+*/
+#define LUA_COMPAT_APIINTCASTS
+
+#endif /* } */
+
+
+#if defined(LUA_COMPAT_5_1) /* { */
+
+/* Incompatibilities from 5.2 -> 5.3 */
+#define LUA_COMPAT_MATHLIB
+#define LUA_COMPAT_APIINTCASTS
+
+/*
+@@ LUA_COMPAT_UNPACK controls the presence of global 'unpack'.
+** You can replace it with 'table.unpack'.
+*/
+#define LUA_COMPAT_UNPACK
+
+/*
+@@ LUA_COMPAT_LOADERS controls the presence of table 'package.loaders'.
+** You can replace it with 'package.searchers'.
+*/
+#define LUA_COMPAT_LOADERS
+
+/*
+@@ macro 'lua_cpcall' emulates deprecated function lua_cpcall.
+** You can call your C function directly (with light C functions).
+*/
+#define lua_cpcall(L,f,u) \
+ (lua_pushcfunction(L, (f)), \
+ lua_pushlightuserdata(L,(u)), \
+ lua_pcall(L,1,0,0))
+
+
+/*
+@@ LUA_COMPAT_LOG10 defines the function 'log10' in the math library.
+** You can rewrite 'log10(x)' as 'log(x, 10)'.
+*/
+#define LUA_COMPAT_LOG10
+
+/*
+@@ LUA_COMPAT_LOADSTRING defines the function 'loadstring' in the base
+** library. You can rewrite 'loadstring(s)' as 'load(s)'.
+*/
+#define LUA_COMPAT_LOADSTRING
+
+/*
+@@ LUA_COMPAT_MAXN defines the function 'maxn' in the table library.
+*/
+#define LUA_COMPAT_MAXN
+
+/*
+@@ The following macros supply trivial compatibility for some
+** changes in the API. The macros themselves document how to
+** change your code to avoid using them.
+*/
+#define lua_strlen(L,i) lua_rawlen(L, (i))
+
+#define lua_objlen(L,i) lua_rawlen(L, (i))
+
+#define lua_equal(L,idx1,idx2) lua_compare(L,(idx1),(idx2),LUA_OPEQ)
+#define lua_lessthan(L,idx1,idx2) lua_compare(L,(idx1),(idx2),LUA_OPLT)
+
+/*
+@@ LUA_COMPAT_MODULE controls compatibility with previous
+** module functions 'module' (Lua) and 'luaL_register' (C).
+*/
+#define LUA_COMPAT_MODULE
+
+#endif /* } */
+
+
+/*
+@@ LUA_COMPAT_FLOATSTRING makes Lua format integral floats without a
+@@ float mark ('.0').
+** This macro is not on by default even in compatibility mode,
+** because this is not really an incompatibility.
+*/
+/* #define LUA_COMPAT_FLOATSTRING */
+
+/* }================================================================== */
+
+
+
+/*
+** {==================================================================
+** Configuration for Numbers.
+** Change these definitions if no predefined LUA_FLOAT_* / LUA_INT_*
+** satisfy your needs.
+** ===================================================================
+*/
+
+/*
+@@ LUA_NUMBER is the floating-point type used by Lua.
+@@ LUAI_UACNUMBER is the result of a 'default argument promotion'
+@@ over a floating number.
+@@ l_mathlim(x) corrects limit name 'x' to the proper float type
+** by prefixing it with one of FLT/DBL/LDBL.
+@@ LUA_NUMBER_FRMLEN is the length modifier for writing floats.
+@@ LUA_NUMBER_FMT is the format for writing floats.
+@@ lua_number2str converts a float to a string.
+@@ l_mathop allows the addition of an 'l' or 'f' to all math operations.
+@@ l_floor takes the floor of a float.
+@@ lua_str2number converts a decimal numeric string to a number.
+*/
+
+
+/* The following definitions are good for most cases here */
+
+#define l_floor(x) (l_mathop(floor)(x))
+
+#define lua_number2str(s,sz,n) \
+ l_sprintf((s), sz, LUA_NUMBER_FMT, (LUAI_UACNUMBER)(n))
+
+/*
+@@ lua_numbertointeger converts a float number to an integer, or
+** returns 0 if float is not within the range of a lua_Integer.
+** (The range comparisons are tricky because of rounding. The tests
+** here assume a two's-complement representation, where MININTEGER always
+** has an exact representation as a float; MAXINTEGER may not have one,
+** and therefore its conversion to float may have an ill-defined value.)
+*/
+#define lua_numbertointeger(n,p) \
+ ((n) >= (LUA_NUMBER)(LUA_MININTEGER) && \
+ (n) < -(LUA_NUMBER)(LUA_MININTEGER) && \
+ (*(p) = (LUA_INTEGER)(n), 1))
+
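+/*
+** Editorial note (illustrative, not part of the upstream Lua sources):
+** with the default 'double'/'long long' configuration the macro above
+** yields 1 and stores the converted value for an in-range float such as
+** 3.0, and yields 0 (leaving '*p' untouched) for a value like 1e19,
+** which lies beyond 2^63 and therefore outside the lua_Integer range.
+*/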
+
+/* now the variable definitions */
+
+#if LUA_FLOAT_TYPE == LUA_FLOAT_FLOAT /* { single float */
+
+#define LUA_NUMBER float
+
+#define l_mathlim(n) (FLT_##n)
+
+#define LUAI_UACNUMBER double
+
+#define LUA_NUMBER_FRMLEN ""
+#define LUA_NUMBER_FMT "%.7g"
+
+#define l_mathop(op) op##f
+
+#define lua_str2number(s,p) strtof((s), (p))
+
+
+#elif LUA_FLOAT_TYPE == LUA_FLOAT_LONGDOUBLE /* }{ long double */
+
+#define LUA_NUMBER long double
+
+#define l_mathlim(n) (LDBL_##n)
+
+#define LUAI_UACNUMBER long double
+
+#define LUA_NUMBER_FRMLEN "L"
+#define LUA_NUMBER_FMT "%.19Lg"
+
+#define l_mathop(op) op##l
+
+#define lua_str2number(s,p) strtold((s), (p))
+
+#elif LUA_FLOAT_TYPE == LUA_FLOAT_DOUBLE /* }{ double */
+
+#define LUA_NUMBER double
+
+#define l_mathlim(n) (DBL_##n)
+
+#define LUAI_UACNUMBER double
+
+#define LUA_NUMBER_FRMLEN ""
+#define LUA_NUMBER_FMT "%.14g"
+
+#define l_mathop(op) op
+
+#define lua_str2number(s,p) strtod((s), (p))
+
+#else /* }{ */
+
+#error "numeric float type not defined"
+
+#endif /* } */
+
+
+
+/*
+@@ LUA_INTEGER is the integer type used by Lua.
+**
+@@ LUA_UNSIGNED is the unsigned version of LUA_INTEGER.
+**
+@@ LUAI_UACINT is the result of a 'default argument promotion'
+@@ over a LUA_INTEGER.
+@@ LUA_INTEGER_FRMLEN is the length modifier for reading/writing integers.
+@@ LUA_INTEGER_FMT is the format for writing integers.
+@@ LUA_MAXINTEGER is the maximum value for a LUA_INTEGER.
+@@ LUA_MININTEGER is the minimum value for a LUA_INTEGER.
+@@ lua_integer2str converts an integer to a string.
+*/
+
+
+/* The following definitions are good for most cases here */
+
+#define LUA_INTEGER_FMT "%" LUA_INTEGER_FRMLEN "d"
+
+#define LUAI_UACINT LUA_INTEGER
+
+#define lua_integer2str(s,sz,n) \
+ l_sprintf((s), sz, LUA_INTEGER_FMT, (LUAI_UACINT)(n))
+
+/*
+** use LUAI_UACINT here to avoid problems with promotions (which
+** can turn a comparison between unsigneds into a signed comparison)
+*/
+#define LUA_UNSIGNED unsigned LUAI_UACINT
+
+
+/* now the variable definitions */
+
+#if LUA_INT_TYPE == LUA_INT_INT /* { int */
+
+#define LUA_INTEGER int
+#define LUA_INTEGER_FRMLEN ""
+
+#define LUA_MAXINTEGER INT_MAX
+#define LUA_MININTEGER INT_MIN
+
+#elif LUA_INT_TYPE == LUA_INT_LONG /* }{ long */
+
+#define LUA_INTEGER long
+#define LUA_INTEGER_FRMLEN "l"
+
+#define LUA_MAXINTEGER LONG_MAX
+#define LUA_MININTEGER LONG_MIN
+
+#elif LUA_INT_TYPE == LUA_INT_LONGLONG /* }{ long long */
+
+/* use presence of macro LLONG_MAX as proxy for C99 compliance */
+#if defined(LLONG_MAX) /* { */
+/* use ISO C99 stuff */
+
+#define LUA_INTEGER long long
+#define LUA_INTEGER_FRMLEN "ll"
+
+#define LUA_MAXINTEGER LLONG_MAX
+#define LUA_MININTEGER LLONG_MIN
+
+#elif defined(LUA_USE_WINDOWS) /* }{ */
+/* in Windows, can use specific Windows types */
+
+#define LUA_INTEGER __int64
+#define LUA_INTEGER_FRMLEN "I64"
+
+#define LUA_MAXINTEGER _I64_MAX
+#define LUA_MININTEGER _I64_MIN
+
+#else /* }{ */
+
+#error "Compiler does not support 'long long'. Use option '-DLUA_32BITS' \
+ or '-DLUA_C89_NUMBERS' (see file 'luaconf.h' for details)"
+
+#endif /* } */
+
+#else /* }{ */
+
+#error "numeric integer type not defined"
+
+#endif /* } */
+
+/* }================================================================== */
+
+
+/*
+** {==================================================================
+** Dependencies with C99 and other C details
+** ===================================================================
+*/
+
+/*
+@@ l_sprintf is equivalent to 'snprintf' or 'sprintf' in C89.
+** (All uses in Lua have only one format item.)
+*/
+#if !defined(LUA_USE_C89)
+#define l_sprintf(s,sz,f,i) snprintf(s,sz,f,i)
+#else
+#define l_sprintf(s,sz,f,i) ((void)(sz), sprintf(s,f,i))
+#endif
+
+
+/*
+@@ lua_strx2number converts a hexadecimal numeric string to a number.
+** In C99, 'strtod' does that conversion. Otherwise, you can
+** leave 'lua_strx2number' undefined and Lua will provide its own
+** implementation.
+*/
+#if !defined(LUA_USE_C89)
+#define lua_strx2number(s,p) lua_str2number(s,p)
+#endif
+
+
+/*
+@@ lua_number2strx converts a float to a hexadecimal numeric string.
+** In C99, 'sprintf' (with format specifiers '%a'/'%A') does that.
+** Otherwise, you can leave 'lua_number2strx' undefined and Lua will
+** provide its own implementation.
+*/
+#if !defined(LUA_USE_C89)
+#define lua_number2strx(L,b,sz,f,n) \
+ ((void)L, l_sprintf(b,sz,f,(LUAI_UACNUMBER)(n)))
+#endif
+
+
+/*
+** 'strtof' and 'opf' variants for math functions are not valid in
+** C89. Otherwise, the macro 'HUGE_VALF' is a good proxy for testing the
+** availability of these variants. ('math.h' is already included in
+** all files that use these macros.)
+*/
+#if defined(LUA_USE_C89) || (defined(HUGE_VAL) && !defined(HUGE_VALF))
+#undef l_mathop /* variants not available */
+#undef lua_str2number
+#define l_mathop(op) (lua_Number)op /* no variant */
+#define lua_str2number(s,p) ((lua_Number)strtod((s), (p)))
+#endif
+
+
+/*
+@@ LUA_KCONTEXT is the type of the context ('ctx') for continuation
+** functions. It must be a numerical type; Lua will use 'intptr_t' if
+** available, otherwise it will use 'ptrdiff_t' (the nearest thing to
+** 'intptr_t' in C89)
+*/
+#define LUA_KCONTEXT ptrdiff_t
+
+#if !defined(LUA_USE_C89) && defined(__STDC_VERSION__) && \
+ __STDC_VERSION__ >= 199901L
+#include <stdint.h>
+#if defined(INTPTR_MAX) /* even in C99 this type is optional */
+#undef LUA_KCONTEXT
+#define LUA_KCONTEXT intptr_t
+#endif
+#endif
+
+
+/*
+@@ lua_getlocaledecpoint gets the locale "radix character" (decimal point).
+** Change that if you do not want to use C locales. (Code using this
+** macro must include header 'locale.h'.)
+*/
+#if !defined(lua_getlocaledecpoint)
+#define lua_getlocaledecpoint() (localeconv()->decimal_point[0])
+#endif
+
+/* }================================================================== */
+
+
+/*
+** {==================================================================
+** Language Variations
+** =====================================================================
+*/
+
+/*
+@@ LUA_NOCVTN2S/LUA_NOCVTS2N control how Lua performs some
+** coercions. Define LUA_NOCVTN2S to turn off automatic coercion from
+** numbers to strings. Define LUA_NOCVTS2N to turn off automatic
+** coercion from strings to numbers.
+*/
+/* #define LUA_NOCVTN2S */
+/* #define LUA_NOCVTS2N */
+
+
+/*
+@@ LUA_USE_APICHECK turns on several consistency checks on the C API.
+** Define it as a help when debugging C code.
+*/
+#if defined(LUA_USE_APICHECK)
+#include <assert.h>
+#define luai_apicheck(l,e) assert(e)
+#endif
+
+/* }================================================================== */
+
+
+/*
+** {==================================================================
+** Macros that affect the API and must be stable (that is, must be the
+** same when you compile Lua and when you compile code that links to
+** Lua). You probably do not want/need to change them.
+** =====================================================================
+*/
+
+/*
+@@ LUAI_MAXSTACK limits the size of the Lua stack.
+** CHANGE it if you need a different limit. This limit is arbitrary;
+** its only purpose is to stop Lua from consuming unlimited stack
+** space (and to reserve some numbers for pseudo-indices).
+*/
+#if LUAI_BITSINT >= 32
+#define LUAI_MAXSTACK 1000000
+#else
+#define LUAI_MAXSTACK 15000
+#endif
+
+
+/*
+@@ LUA_EXTRASPACE defines the size of a raw memory area associated with
+** a Lua state with very fast access.
+** CHANGE it if you need a different size.
+*/
+#define LUA_EXTRASPACE (sizeof(void *))
+
+
+/*
+@@ LUA_IDSIZE gives the maximum size for the description of the source
+@@ of a function in debug information.
+** CHANGE it if you want a different size.
+*/
+#define LUA_IDSIZE 60
+
+
+/*
+@@ LUAL_BUFFERSIZE is the buffer size used by the lauxlib buffer system.
+** CHANGE it if it uses too much C-stack space. (For long double,
+** 'string.format("%.99f", -1e4932)' needs 5034 bytes, so a
+** smaller buffer would force a memory allocation for each call to
+** 'string.format'.)
+*/
+#if LUA_FLOAT_TYPE == LUA_FLOAT_LONGDOUBLE
+#define LUAL_BUFFERSIZE 8192
+#else
+#define LUAL_BUFFERSIZE ((int)(0x80 * sizeof(void*) * sizeof(lua_Integer)))
+#endif
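+
+/*
+** Editorial note (illustrative, not part of the upstream Lua sources):
+** on a typical 64-bit build (sizeof(void*) == 8 and lua_Integer being
+** 'long long', so sizeof(lua_Integer) == 8) the generic formula above
+** gives 0x80 * 8 * 8 == 8192 bytes, the same value used for the
+** long-double case.
+*/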
+
+/* }================================================================== */
+
+
+/*
+@@ LUA_QL describes how error messages quote program elements.
+** Lua does not use these macros anymore; they are here for
+** compatibility only.
+*/
+#define LUA_QL(x) "'" x "'"
+#define LUA_QS LUA_QL("%s")
+
+
+
+
+/* =================================================================== */
+
+/*
+** Local configuration. You can use this space to add your redefinitions
+** without modifying the main part of the file.
+*/
+
+
+
+
+
+#endif
+
diff --git a/lua/src/lualib.h b/lua/src/lualib.h
new file mode 100644
index 000000000..6c0bc4cb0
--- /dev/null
+++ b/lua/src/lualib.h
@@ -0,0 +1,61 @@
+/*
+** $Id: lualib.h,v 1.45 2017/01/12 17:14:26 roberto Exp $
+** Lua standard libraries
+** See Copyright Notice in lua.h
+*/
+
+
+#ifndef lualib_h
+#define lualib_h
+
+#include "lua.h"
+
+
+/* version suffix for environment variable names */
+#define LUA_VERSUFFIX "_" LUA_VERSION_MAJOR "_" LUA_VERSION_MINOR
+
+
+LUAMOD_API int (luaopen_base) (lua_State *L);
+
+#define LUA_COLIBNAME "coroutine"
+LUAMOD_API int (luaopen_coroutine) (lua_State *L);
+
+#define LUA_TABLIBNAME "table"
+LUAMOD_API int (luaopen_table) (lua_State *L);
+
+#define LUA_IOLIBNAME "io"
+LUAMOD_API int (luaopen_io) (lua_State *L);
+
+#define LUA_OSLIBNAME "os"
+LUAMOD_API int (luaopen_os) (lua_State *L);
+
+#define LUA_STRLIBNAME "string"
+LUAMOD_API int (luaopen_string) (lua_State *L);
+
+#define LUA_UTF8LIBNAME "utf8"
+LUAMOD_API int (luaopen_utf8) (lua_State *L);
+
+#define LUA_BITLIBNAME "bit32"
+LUAMOD_API int (luaopen_bit32) (lua_State *L);
+
+#define LUA_MATHLIBNAME "math"
+LUAMOD_API int (luaopen_math) (lua_State *L);
+
+#define LUA_DBLIBNAME "debug"
+LUAMOD_API int (luaopen_debug) (lua_State *L);
+
+#define LUA_LOADLIBNAME "package"
+LUAMOD_API int (luaopen_package) (lua_State *L);
+
+
+/* open all previous libraries */
+LUALIB_API void (luaL_openlibs) (lua_State *L);
+
+
+
+#if !defined(lua_assert)
+#define lua_assert(x) ((void)0)
+#endif
+
+
+#endif
diff --git a/lua/src/lundump.c b/lua/src/lundump.c
new file mode 100644
index 000000000..4080af9c0
--- /dev/null
+++ b/lua/src/lundump.c
@@ -0,0 +1,279 @@
+/*
+** $Id: lundump.c,v 2.44 2015/11/02 16:09:30 roberto Exp $
+** load precompiled Lua chunks
+** See Copyright Notice in lua.h
+*/
+
+#define lundump_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <string.h>
+
+#include "lua.h"
+
+#include "ldebug.h"
+#include "ldo.h"
+#include "lfunc.h"
+#include "lmem.h"
+#include "lobject.h"
+#include "lstring.h"
+#include "lundump.h"
+#include "lzio.h"
+
+
+#if !defined(luai_verifycode)
+#define luai_verifycode(L,b,f) /* empty */
+#endif
+
+
+typedef struct {
+ lua_State *L;
+ ZIO *Z;
+ const char *name;
+} LoadState;
+
+
+static l_noret error(LoadState *S, const char *why) {
+ luaO_pushfstring(S->L, "%s: %s precompiled chunk", S->name, why);
+ luaD_throw(S->L, LUA_ERRSYNTAX);
+}
+
+
+/*
+** All high-level loads go through LoadVector; you can change it to
+** adapt to the endianness of the input
+*/
+#define LoadVector(S,b,n) LoadBlock(S,b,(n)*sizeof((b)[0]))
+
+static void LoadBlock (LoadState *S, void *b, size_t size) {
+ if (luaZ_read(S->Z, b, size) != 0)
+ error(S, "truncated");
+}
+
+
+#define LoadVar(S,x) LoadVector(S,&x,1)
+
+
+static lu_byte LoadByte (LoadState *S) {
+ lu_byte x;
+ LoadVar(S, x);
+ return x;
+}
+
+
+static int LoadInt (LoadState *S) {
+ int x;
+ LoadVar(S, x);
+ return x;
+}
+
+
+static lua_Number LoadNumber (LoadState *S) {
+ lua_Number x;
+ LoadVar(S, x);
+ return x;
+}
+
+
+static lua_Integer LoadInteger (LoadState *S) {
+ lua_Integer x;
+ LoadVar(S, x);
+ return x;
+}
+
+
+static TString *LoadString (LoadState *S) {
+ size_t size = LoadByte(S);
+ if (size == 0xFF)
+ LoadVar(S, size);
+ if (size == 0)
+ return NULL;
+ else if (--size <= LUAI_MAXSHORTLEN) { /* short string? */
+ char buff[LUAI_MAXSHORTLEN];
+ LoadVector(S, buff, size);
+ return luaS_newlstr(S->L, buff, size);
+ }
+ else { /* long string */
+ TString *ts = luaS_createlngstrobj(S->L, size);
+ LoadVector(S, getstr(ts), size); /* load directly in final place */
+ return ts;
+ }
+}
+
+
+static void LoadCode (LoadState *S, Proto *f) {
+ int n = LoadInt(S);
+ f->code = luaM_newvector(S->L, n, Instruction);
+ f->sizecode = n;
+ LoadVector(S, f->code, n);
+}
+
+
+static void LoadFunction(LoadState *S, Proto *f, TString *psource);
+
+
+static void LoadConstants (LoadState *S, Proto *f) {
+ int i;
+ int n = LoadInt(S);
+ f->k = luaM_newvector(S->L, n, TValue);
+ f->sizek = n;
+ for (i = 0; i < n; i++)
+ setnilvalue(&f->k[i]);
+ for (i = 0; i < n; i++) {
+ TValue *o = &f->k[i];
+ int t = LoadByte(S);
+ switch (t) {
+ case LUA_TNIL:
+ setnilvalue(o);
+ break;
+ case LUA_TBOOLEAN:
+ setbvalue(o, LoadByte(S));
+ break;
+ case LUA_TNUMFLT:
+ setfltvalue(o, LoadNumber(S));
+ break;
+ case LUA_TNUMINT:
+ setivalue(o, LoadInteger(S));
+ break;
+ case LUA_TSHRSTR:
+ case LUA_TLNGSTR:
+ setsvalue2n(S->L, o, LoadString(S));
+ break;
+ default:
+ lua_assert(0);
+ }
+ }
+}
+
+
+static void LoadProtos (LoadState *S, Proto *f) {
+ int i;
+ int n = LoadInt(S);
+ f->p = luaM_newvector(S->L, n, Proto *);
+ f->sizep = n;
+ for (i = 0; i < n; i++)
+ f->p[i] = NULL;
+ for (i = 0; i < n; i++) {
+ f->p[i] = luaF_newproto(S->L);
+ LoadFunction(S, f->p[i], f->source);
+ }
+}
+
+
+static void LoadUpvalues (LoadState *S, Proto *f) {
+ int i, n;
+ n = LoadInt(S);
+ f->upvalues = luaM_newvector(S->L, n, Upvaldesc);
+ f->sizeupvalues = n;
+ for (i = 0; i < n; i++)
+ f->upvalues[i].name = NULL;
+ for (i = 0; i < n; i++) {
+ f->upvalues[i].instack = LoadByte(S);
+ f->upvalues[i].idx = LoadByte(S);
+ }
+}
+
+
+static void LoadDebug (LoadState *S, Proto *f) {
+ int i, n;
+ n = LoadInt(S);
+ f->lineinfo = luaM_newvector(S->L, n, int);
+ f->sizelineinfo = n;
+ LoadVector(S, f->lineinfo, n);
+ n = LoadInt(S);
+ f->locvars = luaM_newvector(S->L, n, LocVar);
+ f->sizelocvars = n;
+ for (i = 0; i < n; i++)
+ f->locvars[i].varname = NULL;
+ for (i = 0; i < n; i++) {
+ f->locvars[i].varname = LoadString(S);
+ f->locvars[i].startpc = LoadInt(S);
+ f->locvars[i].endpc = LoadInt(S);
+ }
+ n = LoadInt(S);
+ for (i = 0; i < n; i++)
+ f->upvalues[i].name = LoadString(S);
+}
+
+
+static void LoadFunction (LoadState *S, Proto *f, TString *psource) {
+ f->source = LoadString(S);
+ if (f->source == NULL) /* no source in dump? */
+ f->source = psource; /* reuse parent's source */
+ f->linedefined = LoadInt(S);
+ f->lastlinedefined = LoadInt(S);
+ f->numparams = LoadByte(S);
+ f->is_vararg = LoadByte(S);
+ f->maxstacksize = LoadByte(S);
+ LoadCode(S, f);
+ LoadConstants(S, f);
+ LoadUpvalues(S, f);
+ LoadProtos(S, f);
+ LoadDebug(S, f);
+}
+
+
+static void checkliteral (LoadState *S, const char *s, const char *msg) {
+ char buff[sizeof(LUA_SIGNATURE) + sizeof(LUAC_DATA)]; /* larger than both */
+ size_t len = strlen(s);
+ LoadVector(S, buff, len);
+ if (memcmp(s, buff, len) != 0)
+ error(S, msg);
+}
+
+
+static void fchecksize (LoadState *S, size_t size, const char *tname) {
+ if (LoadByte(S) != size)
+ error(S, luaO_pushfstring(S->L, "%s size mismatch in", tname));
+}
+
+
+#define checksize(S,t) fchecksize(S,sizeof(t),#t)
+
+static void checkHeader (LoadState *S) {
+ checkliteral(S, LUA_SIGNATURE + 1, "not a"); /* 1st char already checked */
+ if (LoadByte(S) != LUAC_VERSION)
+ error(S, "version mismatch in");
+ if (LoadByte(S) != LUAC_FORMAT)
+ error(S, "format mismatch in");
+ checkliteral(S, LUAC_DATA, "corrupted");
+ checksize(S, int);
+ checksize(S, size_t);
+ checksize(S, Instruction);
+ checksize(S, lua_Integer);
+ checksize(S, lua_Number);
+ if (LoadInteger(S) != LUAC_INT)
+ error(S, "endianness mismatch in");
+ if (LoadNumber(S) != LUAC_NUM)
+ error(S, "float format mismatch in");
+}
+
+
+/*
+** load precompiled chunk
+*/
+LClosure *luaU_undump(lua_State *L, ZIO *Z, const char *name) {
+ LoadState S;
+ LClosure *cl;
+ if (*name == '@' || *name == '=')
+ S.name = name + 1;
+ else if (*name == LUA_SIGNATURE[0])
+ S.name = "binary string";
+ else
+ S.name = name;
+ S.L = L;
+ S.Z = Z;
+ checkHeader(&S);
+ cl = luaF_newLclosure(L, LoadByte(&S));
+ setclLvalue(L, L->top, cl);
+ luaD_inctop(L);
+ cl->p = luaF_newproto(L);
+ LoadFunction(&S, cl->p, NULL);
+ lua_assert(cl->nupvalues == cl->p->sizeupvalues);
+ luai_verifycode(L, buff, cl->p);
+ return cl;
+}
+
diff --git a/lua/src/lundump.h b/lua/src/lundump.h
new file mode 100644
index 000000000..aa5cc82f1
--- /dev/null
+++ b/lua/src/lundump.h
@@ -0,0 +1,32 @@
+/*
+** $Id: lundump.h,v 1.45 2015/09/08 15:41:05 roberto Exp $
+** load precompiled Lua chunks
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lundump_h
+#define lundump_h
+
+#include "llimits.h"
+#include "lobject.h"
+#include "lzio.h"
+
+
+/* data to catch conversion errors */
+#define LUAC_DATA "\x19\x93\r\n\x1a\n"
+
+#define LUAC_INT 0x5678
+#define LUAC_NUM cast_num(370.5)
+
+#define MYINT(s) (s[0]-'0')
+#define LUAC_VERSION (MYINT(LUA_VERSION_MAJOR)*16+MYINT(LUA_VERSION_MINOR))
+#define LUAC_FORMAT 0 /* this is the official format */
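+
+/*
+** Editorial note (illustrative, not part of the upstream Lua sources):
+** for Lua 5.3, MYINT(LUA_VERSION_MAJOR) == 5 and MYINT(LUA_VERSION_MINOR)
+** == 3, so LUAC_VERSION is 5*16 + 3 == 0x53; checkHeader in lundump.c
+** rejects precompiled chunks whose version byte differs from this value.
+*/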
+
+/* load one chunk; from lundump.c */
+LUAI_FUNC LClosure* luaU_undump (lua_State* L, ZIO* Z, const char* name);
+
+/* dump one chunk; from ldump.c */
+LUAI_FUNC int luaU_dump (lua_State* L, const Proto* f, lua_Writer w,
+ void* data, int strip);
+
+#endif
diff --git a/lua/src/lutf8lib.c b/lua/src/lutf8lib.c
new file mode 100644
index 000000000..de9e3dcdd
--- /dev/null
+++ b/lua/src/lutf8lib.c
@@ -0,0 +1,256 @@
+/*
+** $Id: lutf8lib.c,v 1.16 2016/12/22 13:08:50 roberto Exp $
+** Standard library for UTF-8 manipulation
+** See Copyright Notice in lua.h
+*/
+
+#define lutf8lib_c
+#define LUA_LIB
+
+#include "lprefix.h"
+
+
+#include <assert.h>
+#include <limits.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "lauxlib.h"
+#include "lualib.h"
+
+#define MAXUNICODE 0x10FFFF
+
+#define iscont(p) ((*(p) & 0xC0) == 0x80)
+
+
+/* from strlib */
+/* translate a relative string position: negative means back from end */
+static lua_Integer u_posrelat (lua_Integer pos, size_t len) {
+ if (pos >= 0) return pos;
+ else if (0u - (size_t)pos > len) return 0;
+ else return (lua_Integer)len + pos + 1;
+}
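+
+/*
+** Editorial note (illustrative, not part of the upstream Lua sources):
+** with len == 5, u_posrelat(-1, 5) == 5 (the last byte) and
+** u_posrelat(-6, 5) == 0 (clamped); non-negative positions are returned
+** unchanged.
+*/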
+
+
+/*
+** Decode one UTF-8 sequence, returning NULL if byte sequence is invalid.
+*/
+static const char *utf8_decode (const char *o, int *val) {
+ static const unsigned int limits[] = {0xFF, 0x7F, 0x7FF, 0xFFFF};
+ const unsigned char *s = (const unsigned char *)o;
+ unsigned int c = s[0];
+ unsigned int res = 0; /* final result */
+ if (c < 0x80) /* ascii? */
+ res = c;
+ else {
+ int count = 0; /* to count number of continuation bytes */
+ while (c & 0x40) { /* still have continuation bytes? */
+ int cc = s[++count]; /* read next byte */
+ if ((cc & 0xC0) != 0x80) /* not a continuation byte? */
+ return NULL; /* invalid byte sequence */
+ res = (res << 6) | (cc & 0x3F); /* add lower 6 bits from cont. byte */
+ c <<= 1; /* to test next bit */
+ }
+ res |= ((c & 0x7F) << (count * 5)); /* add first byte */
+ if (count > 3 || res > MAXUNICODE || res <= limits[count])
+ return NULL; /* invalid byte sequence */
+ s += count; /* skip continuation bytes read */
+ }
+ if (val) *val = res;
+ return (const char *)s + 1; /* +1 to include first byte */
+}
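+
+/*
+** Editorial note (illustrative, not part of the upstream Lua sources):
+** a worked example. For the two-byte sequence 0xC3 0xA9 ("é"), the loop
+** consumes one continuation byte (res == 0x29, count == 1) and the first
+** byte then contributes (0x06 << 5), giving res == 0xE9, i.e. U+00E9.
+*/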
+
+
+/*
+** utf8len(s [, i [, j]]) --> number of characters that start in the
+** range [i,j], or nil + current position if 's' is not well formed in
+** that interval
+*/
+static int utflen (lua_State *L) {
+ int n = 0;
+ size_t len;
+ const char *s = luaL_checklstring(L, 1, &len);
+ lua_Integer posi = u_posrelat(luaL_optinteger(L, 2, 1), len);
+ lua_Integer posj = u_posrelat(luaL_optinteger(L, 3, -1), len);
+ luaL_argcheck(L, 1 <= posi && --posi <= (lua_Integer)len, 2,
+ "initial position out of string");
+ luaL_argcheck(L, --posj < (lua_Integer)len, 3,
+ "final position out of string");
+ while (posi <= posj) {
+ const char *s1 = utf8_decode(s + posi, NULL);
+ if (s1 == NULL) { /* conversion error? */
+ lua_pushnil(L); /* return nil ... */
+ lua_pushinteger(L, posi + 1); /* ... and current position */
+ return 2;
+ }
+ posi = s1 - s;
+ n++;
+ }
+ lua_pushinteger(L, n);
+ return 1;
+}
+
+
+/*
+** codepoint(s, [i, [j]]) -> returns codepoints for all characters
+** that start in the range [i,j]
+*/
+static int codepoint (lua_State *L) {
+ size_t len;
+ const char *s = luaL_checklstring(L, 1, &len);
+ lua_Integer posi = u_posrelat(luaL_optinteger(L, 2, 1), len);
+ lua_Integer pose = u_posrelat(luaL_optinteger(L, 3, posi), len);
+ int n;
+ const char *se;
+ luaL_argcheck(L, posi >= 1, 2, "out of range");
+ luaL_argcheck(L, pose <= (lua_Integer)len, 3, "out of range");
+ if (posi > pose) return 0; /* empty interval; return no values */
+ if (pose - posi >= INT_MAX) /* (lua_Integer -> int) overflow? */
+ return luaL_error(L, "string slice too long");
+ n = (int)(pose - posi) + 1;
+ luaL_checkstack(L, n, "string slice too long");
+ n = 0;
+ se = s + pose;
+ for (s += posi - 1; s < se;) {
+ int code;
+ s = utf8_decode(s, &code);
+ if (s == NULL)
+ return luaL_error(L, "invalid UTF-8 code");
+ lua_pushinteger(L, code);
+ n++;
+ }
+ return n;
+}
+
+
+static void pushutfchar (lua_State *L, int arg) {
+ lua_Integer code = luaL_checkinteger(L, arg);
+ luaL_argcheck(L, 0 <= code && code <= MAXUNICODE, arg, "value out of range");
+ lua_pushfstring(L, "%U", (long)code);
+}
+
+
+/*
+** utfchar(n1, n2, ...) -> char(n1)..char(n2)...
+*/
+static int utfchar (lua_State *L) {
+ int n = lua_gettop(L); /* number of arguments */
+ if (n == 1) /* optimize common case of single char */
+ pushutfchar(L, 1);
+ else {
+ int i;
+ luaL_Buffer b;
+ luaL_buffinit(L, &b);
+ for (i = 1; i <= n; i++) {
+ pushutfchar(L, i);
+ luaL_addvalue(&b);
+ }
+ luaL_pushresult(&b);
+ }
+ return 1;
+}
+
+
+/*
+** offset(s, n, [i]) -> index where n-th character counting from
+** position 'i' starts; 0 means character at 'i'.
+*/
+static int byteoffset (lua_State *L) {
+ size_t len;
+ const char *s = luaL_checklstring(L, 1, &len);
+ lua_Integer n = luaL_checkinteger(L, 2);
+ lua_Integer posi = (n >= 0) ? 1 : len + 1;
+ posi = u_posrelat(luaL_optinteger(L, 3, posi), len);
+ luaL_argcheck(L, 1 <= posi && --posi <= (lua_Integer)len, 3,
+ "position out of range");
+ if (n == 0) {
+ /* find beginning of current byte sequence */
+ while (posi > 0 && iscont(s + posi)) posi--;
+ }
+ else {
+ if (iscont(s + posi))
+ luaL_error(L, "initial position is a continuation byte");
+ if (n < 0) {
+ while (n < 0 && posi > 0) { /* move back */
+ do { /* find beginning of previous character */
+ posi--;
+ } while (posi > 0 && iscont(s + posi));
+ n++;
+ }
+ }
+ else {
+ n--; /* do not move for 1st character */
+ while (n > 0 && posi < (lua_Integer)len) {
+ do { /* find beginning of next character */
+ posi++;
+ } while (iscont(s + posi)); /* (cannot pass final '\0') */
+ n--;
+ }
+ }
+ }
+ if (n == 0) /* did it find given character? */
+ lua_pushinteger(L, posi + 1);
+ else /* no such character */
+ lua_pushnil(L);
+ return 1;
+}
+
+
+static int iter_aux (lua_State *L) {
+ size_t len;
+ const char *s = luaL_checklstring(L, 1, &len);
+ lua_Integer n = lua_tointeger(L, 2) - 1;
+ if (n < 0) /* first iteration? */
+ n = 0; /* start from here */
+ else if (n < (lua_Integer)len) {
+ n++; /* skip current byte */
+ while (iscont(s + n)) n++; /* and its continuations */
+ }
+ if (n >= (lua_Integer)len)
+ return 0; /* no more codepoints */
+ else {
+ int code;
+ const char *next = utf8_decode(s + n, &code);
+ if (next == NULL || iscont(next))
+ return luaL_error(L, "invalid UTF-8 code");
+ lua_pushinteger(L, n + 1);
+ lua_pushinteger(L, code);
+ return 2;
+ }
+}
+
+
+static int iter_codes (lua_State *L) {
+ luaL_checkstring(L, 1);
+ lua_pushcfunction(L, iter_aux);
+ lua_pushvalue(L, 1);
+ lua_pushinteger(L, 0);
+ return 3;
+}
+
+
+/* pattern to match a single UTF-8 character */
+#define UTF8PATT "[\0-\x7F\xC2-\xF4][\x80-\xBF]*"
+
+
+static const luaL_Reg funcs[] = {
+ {"offset", byteoffset},
+ {"codepoint", codepoint},
+ {"char", utfchar},
+ {"len", utflen},
+ {"codes", iter_codes},
+ /* placeholders */
+ {"charpattern", NULL},
+ {NULL, NULL}
+};
+
+
+LUAMOD_API int luaopen_utf8 (lua_State *L) {
+ luaL_newlib(L, funcs);
+ lua_pushlstring(L, UTF8PATT, sizeof(UTF8PATT)/sizeof(char) - 1);
+ lua_setfield(L, -2, "charpattern");
+ return 1;
+}
+
diff --git a/lua/src/lvm.c b/lua/src/lvm.c
new file mode 100644
index 000000000..84ade6b2f
--- /dev/null
+++ b/lua/src/lvm.c
@@ -0,0 +1,1322 @@
+/*
+** $Id: lvm.c,v 2.268 2016/02/05 19:59:14 roberto Exp $
+** Lua virtual machine
+** See Copyright Notice in lua.h
+*/
+
+#define lvm_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+#include <float.h>
+#include <limits.h>
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "lua.h"
+
+#include "ldebug.h"
+#include "ldo.h"
+#include "lfunc.h"
+#include "lgc.h"
+#include "lobject.h"
+#include "lopcodes.h"
+#include "lstate.h"
+#include "lstring.h"
+#include "ltable.h"
+#include "ltm.h"
+#include "lvm.h"
+
+
+/* limit for table tag-method chains (to avoid loops) */
+#define MAXTAGLOOP 2000
+
+
+
+/*
+** 'l_intfitsf' checks whether a given integer can be converted to a
+** float without rounding. Used in comparisons. Left undefined if
+** all integers fit in a float precisely.
+*/
+#if !defined(l_intfitsf)
+
+/* number of bits in the mantissa of a float */
+#define NBM (l_mathlim(MANT_DIG))
+
+/*
+** Check whether some integers may not fit in a float, that is, whether
+** (maxinteger >> NBM) > 0 (that implies (1 << NBM) <= maxinteger).
+** (The shifts are done in parts to avoid shifting by more than the size
+** of an integer. In a worst case, NBM == 113 for long double and
+** sizeof(integer) == 32.)
+*/
+#if ((((LUA_MAXINTEGER >> (NBM / 4)) >> (NBM / 4)) >> (NBM / 4)) \
+ >> (NBM - (3 * (NBM / 4)))) > 0
+
+#define l_intfitsf(i) \
+ (-((lua_Integer)1 << NBM) <= (i) && (i) <= ((lua_Integer)1 << NBM))
+
+#endif
+
+#endif
+
+
+
+/*
+** Try to convert a value to a float. The float case is already handled
+** by the macro 'tonumber'.
+*/
+int luaV_tonumber_ (const TValue *obj, lua_Number *n) {
+ TValue v;
+ if (ttisinteger(obj)) {
+ *n = cast_num(ivalue(obj));
+ return 1;
+ }
+ else if (cvt2num(obj) && /* string convertible to number? */
+ luaO_str2num(svalue(obj), &v) == vslen(obj) + 1) {
+ *n = nvalue(&v); /* convert result of 'luaO_str2num' to a float */
+ return 1;
+ }
+ else
+ return 0; /* conversion failed */
+}
+
+
+/*
+** try to convert a value to an integer, rounding according to 'mode':
+** mode == 0: accepts only integral values
+** mode == 1: takes the floor of the number
+** mode == 2: takes the ceil of the number
+*/
+int luaV_tointeger (const TValue *obj, lua_Integer *p, int mode) {
+ TValue v;
+ again:
+ if (ttisfloat(obj)) {
+ lua_Number n = fltvalue(obj);
+ lua_Number f = l_floor(n);
+ if (n != f) { /* not an integral value? */
+ if (mode == 0) return 0; /* fails if mode demands integral value */
+ else if (mode > 1) /* needs ceil? */
+ f += 1; /* convert floor to ceil (remember: n != f) */
+ }
+ return lua_numbertointeger(f, p);
+ }
+ else if (ttisinteger(obj)) {
+ *p = ivalue(obj);
+ return 1;
+ }
+ else if (cvt2num(obj) &&
+ luaO_str2num(svalue(obj), &v) == vslen(obj) + 1) {
+ obj = &v;
+ goto again; /* convert result from 'luaO_str2num' to an integer */
+ }
+ return 0; /* conversion failed */
+}
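+
+/*
+** Editorial note (illustrative, not part of the upstream Lua sources):
+** for the float 3.5, mode 0 fails (the value is not integral), mode 1
+** yields 3 (floor) and mode 2 yields 4 (ceil); an integral float such
+** as 3.0 converts successfully in any mode.
+*/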
+
+
+/*
+** Try to convert a 'for' limit to an integer, preserving the
+** semantics of the loop.
+** (The following explanation assumes a non-negative step; it is valid
+** for negative steps mutatis mutandis.)
+** If the limit can be converted to an integer, rounding down, that is
+** it.
+** Otherwise, check whether the limit can be converted to a number. If
+** the number is too large, it is OK to set the limit as LUA_MAXINTEGER,
+** which means no limit. If the number is too negative, the loop
+** should not run, because any initial integer value is larger than the
+** limit. So, it sets the limit to LUA_MININTEGER. 'stopnow' corrects
+** the extreme case when the initial value is LUA_MININTEGER, in which
+** case the LUA_MININTEGER limit would still run the loop once.
+*/
+static int forlimit (const TValue *obj, lua_Integer *p, lua_Integer step,
+ int *stopnow) {
+ *stopnow = 0; /* usually, let loops run */
+ if (!luaV_tointeger(obj, p, (step < 0 ? 2 : 1))) { /* not fit in integer? */
+ lua_Number n; /* try to convert to float */
+ if (!tonumber(obj, &n)) /* cannot convert to float? */
+ return 0; /* not a number */
+ if (luai_numlt(0, n)) { /* if true, float is larger than max integer */
+ *p = LUA_MAXINTEGER;
+ if (step < 0) *stopnow = 1;
+ }
+ else { /* float is smaller than min integer */
+ *p = LUA_MININTEGER;
+ if (step >= 0) *stopnow = 1;
+ }
+ }
+ return 1;
+}
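+
+/*
+** Editorial note (illustrative, not part of the upstream Lua sources):
+** for 'for i = 1, 2.7 do ... end' the limit 2.7 is converted with floor
+** to 2, so the loop runs twice; for 'for i = 1, 2^100 do ... end' the
+** limit does not fit in a lua_Integer and is clamped to LUA_MAXINTEGER.
+*/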
+
+
+/*
+** Finish the table access 'val = t[key]'.
+** if 'slot' is NULL, 't' is not a table; otherwise, 'slot' points to
+** t[k] entry (which must be nil).
+*/
+void luaV_finishget (lua_State *L, const TValue *t, TValue *key, StkId val,
+ const TValue *slot) {
+ int loop; /* counter to avoid infinite loops */
+ const TValue *tm; /* metamethod */
+ for (loop = 0; loop < MAXTAGLOOP; loop++) {
+ if (slot == NULL) { /* 't' is not a table? */
+ lua_assert(!ttistable(t));
+ tm = luaT_gettmbyobj(L, t, TM_INDEX);
+ if (ttisnil(tm))
+ luaG_typeerror(L, t, "index"); /* no metamethod */
+ /* else will try the metamethod */
+ }
+ else { /* 't' is a table */
+ lua_assert(ttisnil(slot));
+ tm = fasttm(L, hvalue(t)->metatable, TM_INDEX); /* table's metamethod */
+ if (tm == NULL) { /* no metamethod? */
+ setnilvalue(val); /* result is nil */
+ return;
+ }
+ /* else will try the metamethod */
+ }
+ if (ttisfunction(tm)) { /* is metamethod a function? */
+ luaT_callTM(L, tm, t, key, val, 1); /* call it */
+ return;
+ }
+ t = tm; /* else try to access 'tm[key]' */
+ if (luaV_fastget(L,t,key,slot,luaH_get)) { /* fast track? */
+ setobj2s(L, val, slot); /* done */
+ return;
+ }
+ /* else repeat (tail call 'luaV_finishget') */
+ }
+ luaG_runerror(L, "'__index' chain too long; possible loop");
+}
+
+
+/*
+** Finish a table assignment 't[key] = val'.
+** If 'slot' is NULL, 't' is not a table. Otherwise, 'slot' points
+** to the entry 't[key]', or to 'luaO_nilobject' if there is no such
+** entry. (The value at 'slot' must be nil, otherwise 'luaV_fastset'
+** would have done the job.)
+*/
+void luaV_finishset (lua_State *L, const TValue *t, TValue *key,
+ StkId val, const TValue *slot) {
+ int loop; /* counter to avoid infinite loops */
+ for (loop = 0; loop < MAXTAGLOOP; loop++) {
+ const TValue *tm; /* '__newindex' metamethod */
+ if (slot != NULL) { /* is 't' a table? */
+ Table *h = hvalue(t); /* save 't' table */
+ lua_assert(ttisnil(slot)); /* old value must be nil */
+ tm = fasttm(L, h->metatable, TM_NEWINDEX); /* get metamethod */
+ if (tm == NULL) { /* no metamethod? */
+ if (slot == luaO_nilobject) /* no previous entry? */
+ slot = luaH_newkey(L, h, key); /* create one */
+ /* no metamethod and (now) there is an entry with given key */
+ setobj2t(L, cast(TValue *, slot), val); /* set its new value */
+ invalidateTMcache(h);
+ luaC_barrierback(L, h, val);
+ return;
+ }
+ /* else will try the metamethod */
+ }
+ else { /* not a table; check metamethod */
+ if (ttisnil(tm = luaT_gettmbyobj(L, t, TM_NEWINDEX)))
+ luaG_typeerror(L, t, "index");
+ }
+ /* try the metamethod */
+ if (ttisfunction(tm)) {
+ luaT_callTM(L, tm, t, key, val, 0);
+ return;
+ }
+ t = tm; /* else repeat assignment over 'tm' */
+ if (luaV_fastset(L, t, key, slot, luaH_get, val))
+ return; /* done */
+ /* else loop */
+ }
+ luaG_runerror(L, "'__newindex' chain too long; possible loop");
+}
+
+
+/*
+** Compare two strings 'ls' x 'rs', returning an integer less than,
+** equal to, or larger than zero if 'ls' is smaller than, equal to, or
+** larger than 'rs'. The code is a little tricky because it allows '\0'
+** in the strings and it uses 'strcoll' (to respect locales) for each
+** segment of the strings.
+*/
+static int l_strcmp (const TString *ls, const TString *rs) {
+ const char *l = getstr(ls);
+ size_t ll = tsslen(ls);
+ const char *r = getstr(rs);
+ size_t lr = tsslen(rs);
+ for (;;) { /* for each segment */
+ int temp = strcoll(l, r);
+ if (temp != 0) /* not equal? */
+ return temp; /* done */
+ else { /* strings are equal up to a '\0' */
+ size_t len = strlen(l); /* index of first '\0' in both strings */
+ if (len == lr) /* 'rs' is finished? */
+ return (len == ll) ? 0 : 1; /* check 'ls' */
+ else if (len == ll) /* 'ls' is finished? */
+ return -1; /* 'ls' is smaller than 'rs' ('rs' is not finished) */
+ /* both strings longer than 'len'; go on comparing after the '\0' */
+ len++;
+ l += len; ll -= len; r += len; lr -= len;
+ }
+ }
+}
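+
+/*
+** Editorial note (illustrative, not part of the upstream Lua sources):
+** an example with an embedded zero. Comparing "a\0b" with "ab", the
+** first 'strcoll' call only sees "a" versus "ab" (it stops at the '\0')
+** and normally returns a negative value, so "a\0b" orders before "ab",
+** as a plain byte-wise comparison would.
+*/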
+
+
+/*
+** Check whether integer 'i' is less than float 'f'. If 'i' has an
+** exact representation as a float ('l_intfitsf'), compare numbers as
+** floats. Otherwise, if 'f' is outside the range for integers, result
+** is trivial. Otherwise, compare them as integers. (When 'i' has no
+** float representation, either 'f' is "far away" from 'i' or 'f' has
+** no precision left for a fractional part; either way, how 'f' is
+** truncated is irrelevant.) When 'f' is NaN, comparisons must result
+** in false.
+*/
+static int LTintfloat (lua_Integer i, lua_Number f) {
+#if defined(l_intfitsf)
+ if (!l_intfitsf(i)) {
+ if (f >= -cast_num(LUA_MININTEGER)) /* -minint == maxint + 1 */
+ return 1; /* f >= maxint + 1 > i */
+ else if (f > cast_num(LUA_MININTEGER)) /* minint < f <= maxint ? */
+ return (i < cast(lua_Integer, f)); /* compare them as integers */
+ else /* f <= minint <= i (or 'f' is NaN) --> not(i < f) */
+ return 0;
+ }
+#endif
+ return luai_numlt(cast_num(i), f); /* compare them as floats */
+}
+
+
+/*
+** Check whether integer 'i' is less than or equal to float 'f'.
+** See comments on previous function.
+*/
+static int LEintfloat (lua_Integer i, lua_Number f) {
+#if defined(l_intfitsf)
+ if (!l_intfitsf(i)) {
+ if (f >= -cast_num(LUA_MININTEGER)) /* -minint == maxint + 1 */
+ return 1; /* f >= maxint + 1 > i */
+ else if (f >= cast_num(LUA_MININTEGER)) /* minint <= f <= maxint ? */
+ return (i <= cast(lua_Integer, f)); /* compare them as integers */
+ else /* f < minint <= i (or 'f' is NaN) --> not(i <= f) */
+ return 0;
+ }
+#endif
+ return luai_numle(cast_num(i), f); /* compare them as floats */
+}
+
+
+/*
+** Return 'l < r', for numbers.
+*/
+static int LTnum (const TValue *l, const TValue *r) {
+ if (ttisinteger(l)) {
+ lua_Integer li = ivalue(l);
+ if (ttisinteger(r))
+ return li < ivalue(r); /* both are integers */
+ else /* 'l' is int and 'r' is float */
+ return LTintfloat(li, fltvalue(r)); /* l < r ? */
+ }
+ else {
+ lua_Number lf = fltvalue(l); /* 'l' must be float */
+ if (ttisfloat(r))
+ return luai_numlt(lf, fltvalue(r)); /* both are float */
+ else if (luai_numisnan(lf)) /* 'r' is int and 'l' is float */
+ return 0; /* NaN < i is always false */
+ else /* without NaN, (l < r) <--> not(r <= l) */
+ return !LEintfloat(ivalue(r), lf); /* not (r <= l) ? */
+ }
+}
+
+
+/*
+** Return 'l <= r', for numbers.
+*/
+static int LEnum (const TValue *l, const TValue *r) {
+ if (ttisinteger(l)) {
+ lua_Integer li = ivalue(l);
+ if (ttisinteger(r))
+ return li <= ivalue(r); /* both are integers */
+ else /* 'l' is int and 'r' is float */
+ return LEintfloat(li, fltvalue(r)); /* l <= r ? */
+ }
+ else {
+ lua_Number lf = fltvalue(l); /* 'l' must be float */
+ if (ttisfloat(r))
+ return luai_numle(lf, fltvalue(r)); /* both are float */
+ else if (luai_numisnan(lf)) /* 'r' is int and 'l' is float */
+ return 0; /* NaN <= i is always false */
+ else /* without NaN, (l <= r) <--> not(r < l) */
+ return !LTintfloat(ivalue(r), lf); /* not (r < l) ? */
+ }
+}
+
+
+/*
+** Main operation less than; return 'l < r'.
+*/
+int luaV_lessthan (lua_State *L, const TValue *l, const TValue *r) {
+ int res;
+ if (ttisnumber(l) && ttisnumber(r)) /* both operands are numbers? */
+ return LTnum(l, r);
+ else if (ttisstring(l) && ttisstring(r)) /* both are strings? */
+ return l_strcmp(tsvalue(l), tsvalue(r)) < 0;
+ else if ((res = luaT_callorderTM(L, l, r, TM_LT)) < 0) /* no metamethod? */
+ luaG_ordererror(L, l, r); /* error */
+ return res;
+}
+
+
+/*
+** Main operation less than or equal to; return 'l <= r'. If it needs
+** a metamethod and there is no '__le', try '__lt', based on
+** l <= r iff !(r < l) (assuming a total order). If the metamethod
+** yields during this substitution, the continuation has to know
+** about it (to negate the result of r<l); bit CIST_LEQ in the call
+** status keeps that information.
+*/
+int luaV_lessequal (lua_State *L, const TValue *l, const TValue *r) {
+ int res;
+ if (ttisnumber(l) && ttisnumber(r)) /* both operands are numbers? */
+ return LEnum(l, r);
+ else if (ttisstring(l) && ttisstring(r)) /* both are strings? */
+ return l_strcmp(tsvalue(l), tsvalue(r)) <= 0;
+ else if ((res = luaT_callorderTM(L, l, r, TM_LE)) >= 0) /* try 'le' */
+ return res;
+ else { /* try 'lt': */
+ L->ci->callstatus |= CIST_LEQ; /* mark it is doing 'lt' for 'le' */
+ res = luaT_callorderTM(L, r, l, TM_LT);
+ L->ci->callstatus ^= CIST_LEQ; /* clear mark */
+ if (res < 0)
+ luaG_ordererror(L, l, r);
+ return !res; /* result is negated */
+ }
+}
+
+
+/*
+** Main operation for equality of Lua values; return 't1 == t2'.
+** L == NULL means raw equality (no metamethods)
+*/
+int luaV_equalobj (lua_State *L, const TValue *t1, const TValue *t2) {
+ const TValue *tm;
+ if (ttype(t1) != ttype(t2)) { /* not the same variant? */
+ if (ttnov(t1) != ttnov(t2) || ttnov(t1) != LUA_TNUMBER)
+ return 0; /* only numbers can be equal with different variants */
+ else { /* two numbers with different variants */
+ lua_Integer i1, i2; /* compare them as integers */
+ return (tointeger(t1, &i1) && tointeger(t2, &i2) && i1 == i2);
+ }
+ }
+ /* values have same type and same variant */
+ switch (ttype(t1)) {
+ case LUA_TNIL: return 1;
+ case LUA_TNUMINT: return (ivalue(t1) == ivalue(t2));
+ case LUA_TNUMFLT: return luai_numeq(fltvalue(t1), fltvalue(t2));
+ case LUA_TBOOLEAN: return bvalue(t1) == bvalue(t2); /* true must be 1 !! */
+ case LUA_TLIGHTUSERDATA: return pvalue(t1) == pvalue(t2);
+ case LUA_TLCF: return fvalue(t1) == fvalue(t2);
+ case LUA_TSHRSTR: return eqshrstr(tsvalue(t1), tsvalue(t2));
+ case LUA_TLNGSTR: return luaS_eqlngstr(tsvalue(t1), tsvalue(t2));
+ case LUA_TUSERDATA: {
+ if (uvalue(t1) == uvalue(t2)) return 1;
+ else if (L == NULL) return 0;
+ tm = fasttm(L, uvalue(t1)->metatable, TM_EQ);
+ if (tm == NULL)
+ tm = fasttm(L, uvalue(t2)->metatable, TM_EQ);
+ break; /* will try TM */
+ }
+ case LUA_TTABLE: {
+ if (hvalue(t1) == hvalue(t2)) return 1;
+ else if (L == NULL) return 0;
+ tm = fasttm(L, hvalue(t1)->metatable, TM_EQ);
+ if (tm == NULL)
+ tm = fasttm(L, hvalue(t2)->metatable, TM_EQ);
+ break; /* will try TM */
+ }
+ default:
+ return gcvalue(t1) == gcvalue(t2);
+ }
+ if (tm == NULL) /* no TM? */
+ return 0; /* objects are different */
+ luaT_callTM(L, tm, t1, t2, L->top, 1); /* call TM */
+ return !l_isfalse(L->top);
+}
+
+
+/* macro used by 'luaV_concat' to ensure that element at 'o' is a string */
+#define tostring(L,o) \
+ (ttisstring(o) || (cvt2str(o) && (luaO_tostring(L, o), 1)))
+
+#define isemptystr(o) (ttisshrstring(o) && tsvalue(o)->shrlen == 0)
+
+/* copy strings in stack from top - n up to top - 1 to buffer */
+static void copy2buff (StkId top, int n, char *buff) {
+ size_t tl = 0; /* size already copied */
+ do {
+ size_t l = vslen(top - n); /* length of string being copied */
+ memcpy(buff + tl, svalue(top - n), l * sizeof(char));
+ tl += l;
+ } while (--n > 0);
+}
+
+
+/*
+** Main operation for concatenation: concat 'total' values in the stack,
+** from 'L->top - total' up to 'L->top - 1'.
+*/
+void luaV_concat (lua_State *L, int total) {
+ lua_assert(total >= 2);
+ do {
+ StkId top = L->top;
+ int n = 2; /* number of elements handled in this pass (at least 2) */
+ if (!(ttisstring(top-2) || cvt2str(top-2)) || !tostring(L, top-1))
+ luaT_trybinTM(L, top-2, top-1, top-2, TM_CONCAT);
+ else if (isemptystr(top - 1)) /* second operand is empty? */
+ cast_void(tostring(L, top - 2)); /* result is first operand */
+ else if (isemptystr(top - 2)) { /* first operand is an empty string? */
+ setobjs2s(L, top - 2, top - 1); /* result is second op. */
+ }
+ else {
+ /* at least two non-empty string values; get as many as possible */
+ size_t tl = vslen(top - 1);
+ TString *ts;
+ /* collect total length and number of strings */
+ for (n = 1; n < total && tostring(L, top - n - 1); n++) {
+ size_t l = vslen(top - n - 1);
+ if (l >= (MAX_SIZE/sizeof(char)) - tl)
+ luaG_runerror(L, "string length overflow");
+ tl += l;
+ }
+ if (tl <= LUAI_MAXSHORTLEN) { /* is result a short string? */
+ char buff[LUAI_MAXSHORTLEN];
+ copy2buff(top, n, buff); /* copy strings to buffer */
+ ts = luaS_newlstr(L, buff, tl);
+ }
+ else { /* long string; copy strings directly to final result */
+ ts = luaS_createlngstrobj(L, tl);
+ copy2buff(top, n, getstr(ts));
+ }
+ setsvalue2s(L, top - n, ts); /* create result */
+ }
+ total -= n-1; /* got 'n' strings to create 1 new */
+ L->top -= n-1; /* popped 'n' strings and pushed one */
+ } while (total > 1); /* repeat until only 1 result left */
+}
+
+
+/*
+** Main operation 'ra = #rb'.
+*/
+void luaV_objlen (lua_State *L, StkId ra, const TValue *rb) {
+ const TValue *tm;
+ switch (ttype(rb)) {
+ case LUA_TTABLE: {
+ Table *h = hvalue(rb);
+ tm = fasttm(L, h->metatable, TM_LEN);
+ if (tm) break; /* metamethod? break switch to call it */
+ setivalue(ra, luaH_getn(h)); /* else primitive len */
+ return;
+ }
+ case LUA_TSHRSTR: {
+ setivalue(ra, tsvalue(rb)->shrlen);
+ return;
+ }
+ case LUA_TLNGSTR: {
+ setivalue(ra, tsvalue(rb)->u.lnglen);
+ return;
+ }
+ default: { /* try metamethod */
+ tm = luaT_gettmbyobj(L, rb, TM_LEN);
+ if (ttisnil(tm)) /* no metamethod? */
+ luaG_typeerror(L, rb, "get length of");
+ break;
+ }
+ }
+ luaT_callTM(L, tm, rb, rb, ra, 1);
+}
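At the Lua level, the dispatch above gives '#' its behaviour: byte length for strings, '__len' for tables that define it, and the primitive border otherwise. A short Lua 5.3 sketch:

    print(#"hello")        --> 5   (string length, in bytes)
    print(#{10, 20, 30})   --> 3   (primitive table length)
    local t = setmetatable({}, {__len = function() return 42 end})
    print(#t)              --> 42  (__len metamethod)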
+
+
+/*
+** Integer division; return 'm // n', that is, floor(m/n).
+** C division truncates its result (rounds towards zero).
+** 'floor(q) == trunc(q)' when 'q >= 0' or when 'q' is integer,
+** otherwise 'floor(q) == trunc(q) - 1'.
+*/
+lua_Integer luaV_div (lua_State *L, lua_Integer m, lua_Integer n) {
+ if (l_castS2U(n) + 1u <= 1u) { /* special cases: -1 or 0 */
+ if (n == 0)
+ luaG_runerror(L, "attempt to divide by zero");
+ return intop(-, 0, m); /* n==-1; avoid overflow with 0x80000...//-1 */
+ }
+ else {
+ lua_Integer q = m / n; /* perform C division */
+ if ((m ^ n) < 0 && m % n != 0) /* 'm/n' would be negative non-integer? */
+ q -= 1; /* correct result for different rounding */
+ return q;
+ }
+}
+
+
+/*
+** Integer modulus; return 'm % n'. (Assume that C '%' with
+** negative operands follows C99 behavior. See previous comment
+** about luaV_div.)
+*/
+lua_Integer luaV_mod (lua_State *L, lua_Integer m, lua_Integer n) {
+ if (l_castS2U(n) + 1u <= 1u) { /* special cases: -1 or 0 */
+ if (n == 0)
+ luaG_runerror(L, "attempt to perform 'n%%0'");
+ return 0; /* m % -1 == 0; avoid overflow with 0x80000...%-1 */
+ }
+ else {
+ lua_Integer r = m % n;
+ if (r != 0 && (m ^ n) < 0) /* 'm/n' would be non-integer negative? */
+ r += n; /* correct result for different rounding */
+ return r;
+ }
+}
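A few worked values, assuming Lua 5.3, showing the rounding correction described above and the 'n == -1' special case:

    print(7 // 2, -7 // 2)   --> 3    -4   (floor, not C truncation)
    print(7 % 3, -7 % 3)     --> 1    2    (nonzero result has the divisor's sign)
    print(7 % -3)            --> -2
    print(math.mininteger // -1 == math.mininteger)   --> true (no overflow)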
+
+
+/* number of bits in an integer */
+#define NBITS cast_int(sizeof(lua_Integer) * CHAR_BIT)
+
+/*
+** Shift left operation. (Shift right just negates 'y'.)
+*/
+lua_Integer luaV_shiftl (lua_Integer x, lua_Integer y) {
+ if (y < 0) { /* shift right? */
+ if (y <= -NBITS) return 0;
+ else return intop(>>, x, -y);
+ }
+ else { /* shift left */
+ if (y >= NBITS) return 0;
+ else return intop(<<, x, y);
+ }
+}
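Lua-level consequences of 'luaV_shiftl', assuming 64-bit Lua 5.3 integers: negative counts shift the other way, counts of NBITS or more yield 0, and shifts are logical (no sign extension).

    print(1 << 4)     --> 16
    print(1 << 70)    --> 0    (count >= NBITS)
    print(16 >> -1)   --> 32   (negative count shifts left)
    print(-1 >> 1)    --> 9223372036854775807   (logical shift)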
+
+
+/*
+** check whether cached closure in prototype 'p' may be reused, that is,
+** whether there is a cached closure with the same upvalues needed by the
+** new closure to be created.
+*/
+static LClosure *getcached (Proto *p, UpVal **encup, StkId base) {
+ LClosure *c = p->cache;
+ if (c != NULL) { /* is there a cached closure? */
+ int nup = p->sizeupvalues;
+ Upvaldesc *uv = p->upvalues;
+ int i;
+ for (i = 0; i < nup; i++) { /* check whether it has right upvalues */
+ TValue *v = uv[i].instack ? base + uv[i].idx : encup[uv[i].idx]->v;
+ if (c->upvals[i]->v != v)
+ return NULL; /* wrong upvalue; cannot reuse closure */
+ }
+ }
+ return c; /* return cached closure (or NULL if no cached closure) */
+}
+
+
+/*
+** create a new Lua closure, push it in the stack, and initialize
+** its upvalues. Note that the closure is not cached if prototype is
+** already black (which means that 'cache' was already cleared by the
+** GC).
+*/
+static void pushclosure (lua_State *L, Proto *p, UpVal **encup, StkId base,
+ StkId ra) {
+ int nup = p->sizeupvalues;
+ Upvaldesc *uv = p->upvalues;
+ int i;
+ LClosure *ncl = luaF_newLclosure(L, nup);
+ ncl->p = p;
+ setclLvalue(L, ra, ncl); /* anchor new closure in stack */
+ for (i = 0; i < nup; i++) { /* fill in its upvalues */
+ if (uv[i].instack) /* upvalue refers to local variable? */
+ ncl->upvals[i] = luaF_findupval(L, base + uv[i].idx);
+ else /* get upvalue from enclosing function */
+ ncl->upvals[i] = encup[uv[i].idx];
+ ncl->upvals[i]->refcount++;
+ /* new closure is white, so we do not need a barrier here */
+ }
+ if (!isblack(p)) /* cache will not break GC invariant? */
+ p->cache = ncl; /* save it on cache for reuse */
+}
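One observable, though not guaranteed, consequence of this cache in Lua 5.3 (it is an implementation detail, dropped in later Lua versions): two calls that would build identical closures may return the same object.

    local function make() return function() return 0 end end
    print(make() == make())   --> usually true: the cached closure is reused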
+
+
+/*
+** finish execution of an opcode interrupted by a yield
+*/
+void luaV_finishOp (lua_State *L) {
+ CallInfo *ci = L->ci;
+ StkId base = ci->u.l.base;
+ Instruction inst = *(ci->u.l.savedpc - 1); /* interrupted instruction */
+ OpCode op = GET_OPCODE(inst);
+ switch (op) { /* finish its execution */
+ case OP_ADD: case OP_SUB: case OP_MUL: case OP_DIV: case OP_IDIV:
+ case OP_BAND: case OP_BOR: case OP_BXOR: case OP_SHL: case OP_SHR:
+ case OP_MOD: case OP_POW:
+ case OP_UNM: case OP_BNOT: case OP_LEN:
+ case OP_GETTABUP: case OP_GETTABLE: case OP_SELF: {
+ setobjs2s(L, base + GETARG_A(inst), --L->top);
+ break;
+ }
+ case OP_LE: case OP_LT: case OP_EQ: {
+ int res = !l_isfalse(L->top - 1);
+ L->top--;
+ if (ci->callstatus & CIST_LEQ) { /* "<=" using "<" instead? */
+ lua_assert(op == OP_LE);
+ ci->callstatus ^= CIST_LEQ; /* clear mark */
+ res = !res; /* negate result */
+ }
+ lua_assert(GET_OPCODE(*ci->u.l.savedpc) == OP_JMP);
+ if (res != GETARG_A(inst)) /* condition failed? */
+ ci->u.l.savedpc++; /* skip jump instruction */
+ break;
+ }
+ case OP_CONCAT: {
+ StkId top = L->top - 1; /* top when 'luaT_trybinTM' was called */
+ int b = GETARG_B(inst); /* first element to concatenate */
+ int total = cast_int(top - 1 - (base + b)); /* yet to concatenate */
+ setobj2s(L, top - 2, top); /* put TM result in proper position */
+ if (total > 1) { /* are there elements to concat? */
+ L->top = top - 1; /* top is one after last element (at top-2) */
+ luaV_concat(L, total); /* concat them (may yield again) */
+ }
+ /* move final result to final position */
+ setobj2s(L, ci->u.l.base + GETARG_A(inst), L->top - 1);
+ L->top = ci->top; /* restore top */
+ break;
+ }
+ case OP_TFORCALL: {
+ lua_assert(GET_OPCODE(*ci->u.l.savedpc) == OP_TFORLOOP);
+ L->top = ci->top; /* correct top */
+ break;
+ }
+ case OP_CALL: {
+ if (GETARG_C(inst) - 1 >= 0) /* nresults >= 0? */
+ L->top = ci->top; /* adjust results */
+ break;
+ }
+ case OP_TAILCALL: case OP_SETTABUP: case OP_SETTABLE:
+ break;
+ default: lua_assert(0);
+ }
+}
+
+
+
+
+/*
+** {==================================================================
+** Function 'luaV_execute': main interpreter loop
+** ===================================================================
+*/
+
+
+/*
+** some macros for common tasks in 'luaV_execute'
+*/
+
+
+#define RA(i) (base+GETARG_A(i))
+#define RB(i) check_exp(getBMode(GET_OPCODE(i)) == OpArgR, base+GETARG_B(i))
+#define RC(i) check_exp(getCMode(GET_OPCODE(i)) == OpArgR, base+GETARG_C(i))
+#define RKB(i) check_exp(getBMode(GET_OPCODE(i)) == OpArgK, \
+ ISK(GETARG_B(i)) ? k+INDEXK(GETARG_B(i)) : base+GETARG_B(i))
+#define RKC(i) check_exp(getCMode(GET_OPCODE(i)) == OpArgK, \
+ ISK(GETARG_C(i)) ? k+INDEXK(GETARG_C(i)) : base+GETARG_C(i))
+
+
+/* execute a jump instruction */
+#define dojump(ci,i,e) \
+ { int a = GETARG_A(i); \
+ if (a != 0) luaF_close(L, ci->u.l.base + a - 1); \
+ ci->u.l.savedpc += GETARG_sBx(i) + e; }
+
+/* for test instructions, execute the jump instruction that follows it */
+#define donextjump(ci) { i = *ci->u.l.savedpc; dojump(ci, i, 1); }
+
+
+#define Protect(x) { {x;}; base = ci->u.l.base; }
+
+#define checkGC(L,c) \
+ { luaC_condGC(L, L->top = (c), /* limit of live values */ \
+ Protect(L->top = ci->top)); /* restore top */ \
+ luai_threadyield(L); }
+
+
+/* fetch an instruction and prepare its execution */
+#define vmfetch() { \
+ i = *(ci->u.l.savedpc++); \
+ if (L->hookmask & (LUA_MASKLINE | LUA_MASKCOUNT)) \
+ Protect(luaG_traceexec(L)); \
+ ra = RA(i); /* WARNING: any stack reallocation invalidates 'ra' */ \
+ lua_assert(base == ci->u.l.base); \
+ lua_assert(base <= L->top && L->top < L->stack + L->stacksize); \
+}
+
+#define vmdispatch(o) switch(o)
+#define vmcase(l) case l:
+#define vmbreak break
+
+
+/*
+** copy of 'luaV_gettable', but protecting the call to a potential
+** metamethod (which can reallocate the stack)
+*/
+#define gettableProtected(L,t,k,v) { const TValue *slot; \
+ if (luaV_fastget(L,t,k,slot,luaH_get)) { setobj2s(L, v, slot); } \
+ else Protect(luaV_finishget(L,t,k,v,slot)); }
+
+
+/* same for 'luaV_settable' */
+#define settableProtected(L,t,k,v) { const TValue *slot; \
+ if (!luaV_fastset(L,t,k,slot,luaH_get,v)) \
+ Protect(luaV_finishset(L,t,k,v,slot)); }
+
+
+
+void luaV_execute (lua_State *L) {
+ CallInfo *ci = L->ci;
+ LClosure *cl;
+ TValue *k;
+ StkId base;
+ ci->callstatus |= CIST_FRESH; /* fresh invocation of 'luaV_execute' */
+ newframe: /* reentry point when frame changes (call/return) */
+ lua_assert(ci == L->ci);
+ cl = clLvalue(ci->func); /* local reference to function's closure */
+ k = cl->p->k; /* local reference to function's constant table */
+ base = ci->u.l.base; /* local copy of function's base */
+ /* main loop of interpreter */
+ for (;;) {
+ Instruction i;
+ StkId ra;
+ vmfetch();
+ vmdispatch (GET_OPCODE(i)) {
+ vmcase(OP_MOVE) {
+ setobjs2s(L, ra, RB(i));
+ vmbreak;
+ }
+ vmcase(OP_LOADK) {
+ TValue *rb = k + GETARG_Bx(i);
+ setobj2s(L, ra, rb);
+ vmbreak;
+ }
+ vmcase(OP_LOADKX) {
+ TValue *rb;
+ lua_assert(GET_OPCODE(*ci->u.l.savedpc) == OP_EXTRAARG);
+ rb = k + GETARG_Ax(*ci->u.l.savedpc++);
+ setobj2s(L, ra, rb);
+ vmbreak;
+ }
+ vmcase(OP_LOADBOOL) {
+ setbvalue(ra, GETARG_B(i));
+ if (GETARG_C(i)) ci->u.l.savedpc++; /* skip next instruction (if C) */
+ vmbreak;
+ }
+ vmcase(OP_LOADNIL) {
+ int b = GETARG_B(i);
+ do {
+ setnilvalue(ra++);
+ } while (b--);
+ vmbreak;
+ }
+ vmcase(OP_GETUPVAL) {
+ int b = GETARG_B(i);
+ setobj2s(L, ra, cl->upvals[b]->v);
+ vmbreak;
+ }
+ vmcase(OP_GETTABUP) {
+ TValue *upval = cl->upvals[GETARG_B(i)]->v;
+ TValue *rc = RKC(i);
+ gettableProtected(L, upval, rc, ra);
+ vmbreak;
+ }
+ vmcase(OP_GETTABLE) {
+ StkId rb = RB(i);
+ TValue *rc = RKC(i);
+ gettableProtected(L, rb, rc, ra);
+ vmbreak;
+ }
+ vmcase(OP_SETTABUP) {
+ TValue *upval = cl->upvals[GETARG_A(i)]->v;
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ settableProtected(L, upval, rb, rc);
+ vmbreak;
+ }
+ vmcase(OP_SETUPVAL) {
+ UpVal *uv = cl->upvals[GETARG_B(i)];
+ setobj(L, uv->v, ra);
+ luaC_upvalbarrier(L, uv);
+ vmbreak;
+ }
+ vmcase(OP_SETTABLE) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ settableProtected(L, ra, rb, rc);
+ vmbreak;
+ }
+ vmcase(OP_NEWTABLE) {
+ int b = GETARG_B(i);
+ int c = GETARG_C(i);
+ Table *t = luaH_new(L);
+ sethvalue(L, ra, t);
+ if (b != 0 || c != 0)
+ luaH_resize(L, t, luaO_fb2int(b), luaO_fb2int(c));
+ checkGC(L, ra + 1);
+ vmbreak;
+ }
+ vmcase(OP_SELF) {
+ const TValue *aux;
+ StkId rb = RB(i);
+ TValue *rc = RKC(i);
+ TString *key = tsvalue(rc); /* key must be a string */
+ setobjs2s(L, ra + 1, rb);
+ if (luaV_fastget(L, rb, key, aux, luaH_getstr)) {
+ setobj2s(L, ra, aux);
+ }
+ else Protect(luaV_finishget(L, rb, rc, ra, aux));
+ vmbreak;
+ }
+ vmcase(OP_ADD) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Number nb; lua_Number nc;
+ if (ttisinteger(rb) && ttisinteger(rc)) {
+ lua_Integer ib = ivalue(rb); lua_Integer ic = ivalue(rc);
+ setivalue(ra, intop(+, ib, ic));
+ }
+ else if (tonumber(rb, &nb) && tonumber(rc, &nc)) {
+ setfltvalue(ra, luai_numadd(L, nb, nc));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_ADD)); }
+ vmbreak;
+ }
+ vmcase(OP_SUB) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Number nb; lua_Number nc;
+ if (ttisinteger(rb) && ttisinteger(rc)) {
+ lua_Integer ib = ivalue(rb); lua_Integer ic = ivalue(rc);
+ setivalue(ra, intop(-, ib, ic));
+ }
+ else if (tonumber(rb, &nb) && tonumber(rc, &nc)) {
+ setfltvalue(ra, luai_numsub(L, nb, nc));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_SUB)); }
+ vmbreak;
+ }
+ vmcase(OP_MUL) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Number nb; lua_Number nc;
+ if (ttisinteger(rb) && ttisinteger(rc)) {
+ lua_Integer ib = ivalue(rb); lua_Integer ic = ivalue(rc);
+ setivalue(ra, intop(*, ib, ic));
+ }
+ else if (tonumber(rb, &nb) && tonumber(rc, &nc)) {
+ setfltvalue(ra, luai_nummul(L, nb, nc));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_MUL)); }
+ vmbreak;
+ }
+ vmcase(OP_DIV) { /* float division (always with floats) */
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Number nb; lua_Number nc;
+ if (tonumber(rb, &nb) && tonumber(rc, &nc)) {
+ setfltvalue(ra, luai_numdiv(L, nb, nc));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_DIV)); }
+ vmbreak;
+ }
+ vmcase(OP_BAND) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Integer ib; lua_Integer ic;
+ if (tointeger(rb, &ib) && tointeger(rc, &ic)) {
+ setivalue(ra, intop(&, ib, ic));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_BAND)); }
+ vmbreak;
+ }
+ vmcase(OP_BOR) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Integer ib; lua_Integer ic;
+ if (tointeger(rb, &ib) && tointeger(rc, &ic)) {
+ setivalue(ra, intop(|, ib, ic));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_BOR)); }
+ vmbreak;
+ }
+ vmcase(OP_BXOR) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Integer ib; lua_Integer ic;
+ if (tointeger(rb, &ib) && tointeger(rc, &ic)) {
+ setivalue(ra, intop(^, ib, ic));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_BXOR)); }
+ vmbreak;
+ }
+ vmcase(OP_SHL) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Integer ib; lua_Integer ic;
+ if (tointeger(rb, &ib) && tointeger(rc, &ic)) {
+ setivalue(ra, luaV_shiftl(ib, ic));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_SHL)); }
+ vmbreak;
+ }
+ vmcase(OP_SHR) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Integer ib; lua_Integer ic;
+ if (tointeger(rb, &ib) && tointeger(rc, &ic)) {
+ setivalue(ra, luaV_shiftl(ib, -ic));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_SHR)); }
+ vmbreak;
+ }
+ vmcase(OP_MOD) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Number nb; lua_Number nc;
+ if (ttisinteger(rb) && ttisinteger(rc)) {
+ lua_Integer ib = ivalue(rb); lua_Integer ic = ivalue(rc);
+ setivalue(ra, luaV_mod(L, ib, ic));
+ }
+ else if (tonumber(rb, &nb) && tonumber(rc, &nc)) {
+ lua_Number m;
+ luai_nummod(L, nb, nc, m);
+ setfltvalue(ra, m);
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_MOD)); }
+ vmbreak;
+ }
+ vmcase(OP_IDIV) { /* floor division */
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Number nb; lua_Number nc;
+ if (ttisinteger(rb) && ttisinteger(rc)) {
+ lua_Integer ib = ivalue(rb); lua_Integer ic = ivalue(rc);
+ setivalue(ra, luaV_div(L, ib, ic));
+ }
+ else if (tonumber(rb, &nb) && tonumber(rc, &nc)) {
+ setfltvalue(ra, luai_numidiv(L, nb, nc));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_IDIV)); }
+ vmbreak;
+ }
+ vmcase(OP_POW) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ lua_Number nb; lua_Number nc;
+ if (tonumber(rb, &nb) && tonumber(rc, &nc)) {
+ setfltvalue(ra, luai_numpow(L, nb, nc));
+ }
+ else { Protect(luaT_trybinTM(L, rb, rc, ra, TM_POW)); }
+ vmbreak;
+ }
+ vmcase(OP_UNM) {
+ TValue *rb = RB(i);
+ lua_Number nb;
+ if (ttisinteger(rb)) {
+ lua_Integer ib = ivalue(rb);
+ setivalue(ra, intop(-, 0, ib));
+ }
+ else if (tonumber(rb, &nb)) {
+ setfltvalue(ra, luai_numunm(L, nb));
+ }
+ else {
+ Protect(luaT_trybinTM(L, rb, rb, ra, TM_UNM));
+ }
+ vmbreak;
+ }
+ vmcase(OP_BNOT) {
+ TValue *rb = RB(i);
+ lua_Integer ib;
+ if (tointeger(rb, &ib)) {
+ setivalue(ra, intop(^, ~l_castS2U(0), ib));
+ }
+ else {
+ Protect(luaT_trybinTM(L, rb, rb, ra, TM_BNOT));
+ }
+ vmbreak;
+ }
+ vmcase(OP_NOT) {
+ TValue *rb = RB(i);
+ int res = l_isfalse(rb); /* next assignment may change this value */
+ setbvalue(ra, res);
+ vmbreak;
+ }
+ vmcase(OP_LEN) {
+ Protect(luaV_objlen(L, ra, RB(i)));
+ vmbreak;
+ }
+ vmcase(OP_CONCAT) {
+ int b = GETARG_B(i);
+ int c = GETARG_C(i);
+ StkId rb;
+ L->top = base + c + 1; /* mark the end of concat operands */
+ Protect(luaV_concat(L, c - b + 1));
+ ra = RA(i); /* 'luaV_concat' may invoke TMs and move the stack */
+ rb = base + b;
+ setobjs2s(L, ra, rb);
+ checkGC(L, (ra >= rb ? ra + 1 : rb));
+ L->top = ci->top; /* restore top */
+ vmbreak;
+ }
+ vmcase(OP_JMP) {
+ dojump(ci, i, 0);
+ vmbreak;
+ }
+ vmcase(OP_EQ) {
+ TValue *rb = RKB(i);
+ TValue *rc = RKC(i);
+ Protect(
+ if (luaV_equalobj(L, rb, rc) != GETARG_A(i))
+ ci->u.l.savedpc++;
+ else
+ donextjump(ci);
+ )
+ vmbreak;
+ }
+ vmcase(OP_LT) {
+ Protect(
+ if (luaV_lessthan(L, RKB(i), RKC(i)) != GETARG_A(i))
+ ci->u.l.savedpc++;
+ else
+ donextjump(ci);
+ )
+ vmbreak;
+ }
+ vmcase(OP_LE) {
+ Protect(
+ if (luaV_lessequal(L, RKB(i), RKC(i)) != GETARG_A(i))
+ ci->u.l.savedpc++;
+ else
+ donextjump(ci);
+ )
+ vmbreak;
+ }
+ vmcase(OP_TEST) {
+ if (GETARG_C(i) ? l_isfalse(ra) : !l_isfalse(ra))
+ ci->u.l.savedpc++;
+ else
+ donextjump(ci);
+ vmbreak;
+ }
+ vmcase(OP_TESTSET) {
+ TValue *rb = RB(i);
+ if (GETARG_C(i) ? l_isfalse(rb) : !l_isfalse(rb))
+ ci->u.l.savedpc++;
+ else {
+ setobjs2s(L, ra, rb);
+ donextjump(ci);
+ }
+ vmbreak;
+ }
+ vmcase(OP_CALL) {
+ int b = GETARG_B(i);
+ int nresults = GETARG_C(i) - 1;
+ if (b != 0) L->top = ra+b; /* else previous instruction set top */
+ if (luaD_precall(L, ra, nresults)) { /* C function? */
+ if (nresults >= 0)
+ L->top = ci->top; /* adjust results */
+ Protect((void)0); /* update 'base' */
+ }
+ else { /* Lua function */
+ ci = L->ci;
+ goto newframe; /* restart luaV_execute over new Lua function */
+ }
+ vmbreak;
+ }
+ vmcase(OP_TAILCALL) {
+ int b = GETARG_B(i);
+ if (b != 0) L->top = ra+b; /* else previous instruction set top */
+ lua_assert(GETARG_C(i) - 1 == LUA_MULTRET);
+ if (luaD_precall(L, ra, LUA_MULTRET)) { /* C function? */
+ Protect((void)0); /* update 'base' */
+ }
+ else {
+ /* tail call: put called frame (n) in place of caller one (o) */
+ CallInfo *nci = L->ci; /* called frame */
+ CallInfo *oci = nci->previous; /* caller frame */
+ StkId nfunc = nci->func; /* called function */
+ StkId ofunc = oci->func; /* caller function */
+ /* last stack slot filled by 'precall' */
+ StkId lim = nci->u.l.base + getproto(nfunc)->numparams;
+ int aux;
+ /* close all upvalues from previous call */
+ if (cl->p->sizep > 0) luaF_close(L, oci->u.l.base);
+ /* move new frame into old one */
+ for (aux = 0; nfunc + aux < lim; aux++)
+ setobjs2s(L, ofunc + aux, nfunc + aux);
+ oci->u.l.base = ofunc + (nci->u.l.base - nfunc); /* correct base */
+ oci->top = L->top = ofunc + (L->top - nfunc); /* correct top */
+ oci->u.l.savedpc = nci->u.l.savedpc;
+ oci->callstatus |= CIST_TAIL; /* function was tail called */
+ ci = L->ci = oci; /* remove new frame */
+ lua_assert(L->top == oci->u.l.base + getproto(ofunc)->maxstacksize);
+ goto newframe; /* restart luaV_execute over new Lua function */
+ }
+ vmbreak;
+ }
+ vmcase(OP_RETURN) {
+ int b = GETARG_B(i);
+ if (cl->p->sizep > 0) luaF_close(L, base);
+ b = luaD_poscall(L, ci, ra, (b != 0 ? b - 1 : cast_int(L->top - ra)));
+ if (ci->callstatus & CIST_FRESH) /* local 'ci' still from callee */
+ return; /* external invocation: return */
+ else { /* invocation via reentry: continue execution */
+ ci = L->ci;
+ if (b) L->top = ci->top;
+ lua_assert(isLua(ci));
+ lua_assert(GET_OPCODE(*((ci)->u.l.savedpc - 1)) == OP_CALL);
+ goto newframe; /* restart luaV_execute over new Lua function */
+ }
+ }
+ vmcase(OP_FORLOOP) {
+ if (ttisinteger(ra)) { /* integer loop? */
+ lua_Integer step = ivalue(ra + 2);
+ lua_Integer idx = intop(+, ivalue(ra), step); /* increment index */
+ lua_Integer limit = ivalue(ra + 1);
+ if ((0 < step) ? (idx <= limit) : (limit <= idx)) {
+ ci->u.l.savedpc += GETARG_sBx(i); /* jump back */
+ chgivalue(ra, idx); /* update internal index... */
+ setivalue(ra + 3, idx); /* ...and external index */
+ }
+ }
+ else { /* floating loop */
+ lua_Number step = fltvalue(ra + 2);
+ lua_Number idx = luai_numadd(L, fltvalue(ra), step); /* inc. index */
+ lua_Number limit = fltvalue(ra + 1);
+ if (luai_numlt(0, step) ? luai_numle(idx, limit)
+ : luai_numle(limit, idx)) {
+ ci->u.l.savedpc += GETARG_sBx(i); /* jump back */
+ chgfltvalue(ra, idx); /* update internal index... */
+ setfltvalue(ra + 3, idx); /* ...and external index */
+ }
+ }
+ vmbreak;
+ }
+ vmcase(OP_FORPREP) {
+ TValue *init = ra;
+ TValue *plimit = ra + 1;
+ TValue *pstep = ra + 2;
+ lua_Integer ilimit;
+ int stopnow;
+ if (ttisinteger(init) && ttisinteger(pstep) &&
+ forlimit(plimit, &ilimit, ivalue(pstep), &stopnow)) {
+ /* all values are integer */
+ lua_Integer initv = (stopnow ? 0 : ivalue(init));
+ setivalue(plimit, ilimit);
+ setivalue(init, intop(-, initv, ivalue(pstep)));
+ }
+ else { /* try making all values floats */
+ lua_Number ninit; lua_Number nlimit; lua_Number nstep;
+ if (!tonumber(plimit, &nlimit))
+ luaG_runerror(L, "'for' limit must be a number");
+ setfltvalue(plimit, nlimit);
+ if (!tonumber(pstep, &nstep))
+ luaG_runerror(L, "'for' step must be a number");
+ setfltvalue(pstep, nstep);
+ if (!tonumber(init, &ninit))
+ luaG_runerror(L, "'for' initial value must be a number");
+ setfltvalue(init, luai_numsub(L, ninit, nstep));
+ }
+ ci->u.l.savedpc += GETARG_sBx(i);
+ vmbreak;
+ }
+ vmcase(OP_TFORCALL) {
+ StkId cb = ra + 3; /* call base */
+ setobjs2s(L, cb+2, ra+2);
+ setobjs2s(L, cb+1, ra+1);
+ setobjs2s(L, cb, ra);
+ L->top = cb + 3; /* func. + 2 args (state and index) */
+ Protect(luaD_call(L, cb, GETARG_C(i)));
+ L->top = ci->top;
+ i = *(ci->u.l.savedpc++); /* go to next instruction */
+ ra = RA(i);
+ lua_assert(GET_OPCODE(i) == OP_TFORLOOP);
+ goto l_tforloop;
+ }
+ vmcase(OP_TFORLOOP) {
+ l_tforloop:
+ if (!ttisnil(ra + 1)) { /* continue loop? */
+ setobjs2s(L, ra, ra + 1); /* save control variable */
+ ci->u.l.savedpc += GETARG_sBx(i); /* jump back */
+ }
+ vmbreak;
+ }
+ vmcase(OP_SETLIST) {
+ int n = GETARG_B(i);
+ int c = GETARG_C(i);
+ unsigned int last;
+ Table *h;
+ if (n == 0) n = cast_int(L->top - ra) - 1;
+ if (c == 0) {
+ lua_assert(GET_OPCODE(*ci->u.l.savedpc) == OP_EXTRAARG);
+ c = GETARG_Ax(*ci->u.l.savedpc++);
+ }
+ h = hvalue(ra);
+ last = ((c-1)*LFIELDS_PER_FLUSH) + n;
+ if (last > h->sizearray) /* needs more space? */
+ luaH_resizearray(L, h, last); /* preallocate it at once */
+ for (; n > 0; n--) {
+ TValue *val = ra+n;
+ luaH_setint(L, h, last--, val);
+ luaC_barrierback(L, h, val);
+ }
+ L->top = ci->top; /* correct top (in case of previous open call) */
+ vmbreak;
+ }
+ vmcase(OP_CLOSURE) {
+ Proto *p = cl->p->p[GETARG_Bx(i)];
+ LClosure *ncl = getcached(p, cl->upvals, base); /* cached closure */
+ if (ncl == NULL) /* no match? */
+ pushclosure(L, p, cl->upvals, base, ra); /* create a new one */
+ else
+ setclLvalue(L, ra, ncl); /* push cached closure */
+ checkGC(L, ra + 1);
+ vmbreak;
+ }
+ vmcase(OP_VARARG) {
+ int b = GETARG_B(i) - 1; /* required results */
+ int j;
+ int n = cast_int(base - ci->func) - cl->p->numparams - 1;
+ if (n < 0) /* fewer arguments than parameters? */
+ n = 0; /* no vararg arguments */
+ if (b < 0) { /* B == 0? */
+ b = n; /* get all var. arguments */
+ Protect(luaD_checkstack(L, n));
+ ra = RA(i); /* previous call may change the stack */
+ L->top = ra + n;
+ }
+ for (j = 0; j < b && j < n; j++)
+ setobjs2s(L, ra + j, base - n + j);
+ for (; j < b; j++) /* complete required results with nil */
+ setnilvalue(ra + j);
+ vmbreak;
+ }
+ vmcase(OP_EXTRAARG) {
+ lua_assert(0);
+ vmbreak;
+ }
+ }
+ }
+}
+
+/* }================================================================== */
+
diff --git a/lua/src/lvm.h b/lua/src/lvm.h
new file mode 100644
index 000000000..422f87194
--- /dev/null
+++ b/lua/src/lvm.h
@@ -0,0 +1,113 @@
+/*
+** $Id: lvm.h,v 2.41 2016/12/22 13:08:50 roberto Exp $
+** Lua virtual machine
+** See Copyright Notice in lua.h
+*/
+
+#ifndef lvm_h
+#define lvm_h
+
+
+#include "ldo.h"
+#include "lobject.h"
+#include "ltm.h"
+
+
+#if !defined(LUA_NOCVTN2S)
+#define cvt2str(o) ttisnumber(o)
+#else
+#define cvt2str(o) 0 /* no conversion from numbers to strings */
+#endif
+
+
+#if !defined(LUA_NOCVTS2N)
+#define cvt2num(o) ttisstring(o)
+#else
+#define cvt2num(o) 0 /* no conversion from strings to numbers */
+#endif
+
+
+/*
+** You can define LUA_FLOORN2I if you want to convert floats to integers
+** by flooring them (instead of raising an error if they are not
+** integral values)
+*/
+#if !defined(LUA_FLOORN2I)
+#define LUA_FLOORN2I 0
+#endif
+
+
+#define tonumber(o,n) \
+ (ttisfloat(o) ? (*(n) = fltvalue(o), 1) : luaV_tonumber_(o,n))
+
+#define tointeger(o,i) \
+ (ttisinteger(o) ? (*(i) = ivalue(o), 1) : luaV_tointeger(o,i,LUA_FLOORN2I))
+
+#define intop(op,v1,v2) l_castU2S(l_castS2U(v1) op l_castS2U(v2))
+
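Because 'intop' does its arithmetic on unsigned values and casts back, Lua integer arithmetic wraps around instead of hitting undefined signed overflow, e.g. in Lua 5.3:

    print(math.maxinteger + 1 == math.mininteger)   --> true
    print(math.mininteger - 1 == math.maxinteger)   --> true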
+#define luaV_rawequalobj(t1,t2) luaV_equalobj(NULL,t1,t2)
+
+
+/*
+** fast track for 'gettable': if 't' is a table and 't[k]' is not nil,
+** return 1 with 'slot' pointing to 't[k]' (final result). Otherwise,
+** return 0 (meaning it will have to check metamethod) with 'slot'
+** pointing to a nil 't[k]' (if 't' is a table) or NULL (otherwise).
+** 'f' is the raw get function to use.
+*/
+#define luaV_fastget(L,t,k,slot,f) \
+ (!ttistable(t) \
+ ? (slot = NULL, 0) /* not a table; 'slot' is NULL and result is 0 */ \
+ : (slot = f(hvalue(t), k), /* else, do raw access */ \
+ !ttisnil(slot))) /* result not nil? */
+
+/*
+** standard implementation for 'gettable'
+*/
+#define luaV_gettable(L,t,k,v) { const TValue *slot; \
+ if (luaV_fastget(L,t,k,slot,luaH_get)) { setobj2s(L, v, slot); } \
+ else luaV_finishget(L,t,k,v,slot); }
+
+
+/*
+** Fast track for set table. If 't' is a table and 't[k]' is not nil,
+** call GC barrier, do a raw 't[k]=v', and return true; otherwise,
+** return false with 'slot' equal to NULL (if 't' is not a table) or
+** 'nil'. (This is needed by 'luaV_finishset'.) Note that, if the macro
+** returns true, there is no need to 'invalidateTMcache', because the
+** call is not creating a new entry.
+*/
+#define luaV_fastset(L,t,k,slot,f,v) \
+ (!ttistable(t) \
+ ? (slot = NULL, 0) \
+ : (slot = f(hvalue(t), k), \
+ ttisnil(slot) ? 0 \
+ : (luaC_barrierback(L, hvalue(t), v), \
+ setobj2t(L, cast(TValue *,slot), v), \
+ 1)))
+
+
+#define luaV_settable(L,t,k,v) { const TValue *slot; \
+ if (!luaV_fastset(L,t,k,slot,luaH_get,v)) \
+ luaV_finishset(L,t,k,v,slot); }
+
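At the Lua level, the fast path plus the 'finish' functions is what separates plain indexing from raw access: the fast path is the raw table lookup, and 'luaV_finishget'/'luaV_finishset' supply the metamethod fallback when the raw slot is nil. A Lua 5.3 sketch:

    local t = setmetatable({}, {__index = function() return "fallback" end})
    t.x = 1
    print(t.x)             --> 1          (raw slot is not nil: fast path)
    print(t.y)             --> fallback   (raw slot is nil: __index consulted)
    print(rawget(t, "y"))  --> nil        (raw access skips metamethods)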
+
+
+LUAI_FUNC int luaV_equalobj (lua_State *L, const TValue *t1, const TValue *t2);
+LUAI_FUNC int luaV_lessthan (lua_State *L, const TValue *l, const TValue *r);
+LUAI_FUNC int luaV_lessequal (lua_State *L, const TValue *l, const TValue *r);
+LUAI_FUNC int luaV_tonumber_ (const TValue *obj, lua_Number *n);
+LUAI_FUNC int luaV_tointeger (const TValue *obj, lua_Integer *p, int mode);
+LUAI_FUNC void luaV_finishget (lua_State *L, const TValue *t, TValue *key,
+ StkId val, const TValue *slot);
+LUAI_FUNC void luaV_finishset (lua_State *L, const TValue *t, TValue *key,
+ StkId val, const TValue *slot);
+LUAI_FUNC void luaV_finishOp (lua_State *L);
+LUAI_FUNC void luaV_execute (lua_State *L);
+LUAI_FUNC void luaV_concat (lua_State *L, int total);
+LUAI_FUNC lua_Integer luaV_div (lua_State *L, lua_Integer x, lua_Integer y);
+LUAI_FUNC lua_Integer luaV_mod (lua_State *L, lua_Integer x, lua_Integer y);
+LUAI_FUNC lua_Integer luaV_shiftl (lua_Integer x, lua_Integer y);
+LUAI_FUNC void luaV_objlen (lua_State *L, StkId ra, const TValue *rb);
+
+#endif
diff --git a/lua/src/lzio.c b/lua/src/lzio.c
new file mode 100644
index 000000000..c9e1f491f
--- /dev/null
+++ b/lua/src/lzio.c
@@ -0,0 +1,68 @@
+/*
+** $Id: lzio.c,v 1.37 2015/09/08 15:41:05 roberto Exp $
+** Buffered streams
+** See Copyright Notice in lua.h
+*/
+
+#define lzio_c
+#define LUA_CORE
+
+#include "lprefix.h"
+
+
+#include <string.h>
+
+#include "lua.h"
+
+#include "llimits.h"
+#include "lmem.h"
+#include "lstate.h"
+#include "lzio.h"
+
+
+int luaZ_fill (ZIO *z) {
+ size_t size;
+ lua_State *L = z->L;
+ const char *buff;
+ lua_unlock(L);
+ buff = z->reader(L, z->data, &size);
+ lua_lock(L);
+ if (buff == NULL || size == 0)
+ return EOZ;
+ z->n = size - 1; /* discount char being returned */
+ z->p = buff;
+ return cast_uchar(*(z->p++));
+}
+
+
+void luaZ_init (lua_State *L, ZIO *z, lua_Reader reader, void *data) {
+ z->L = L;
+ z->reader = reader;
+ z->data = data;
+ z->n = 0;
+ z->p = NULL;
+}
+
+
+/* --------------------------------------------------------------- read --- */
+size_t luaZ_read (ZIO *z, void *b, size_t n) {
+ while (n) {
+ size_t m;
+ if (z->n == 0) { /* no bytes in buffer? */
+ if (luaZ_fill(z) == EOZ) /* try to read more */
+ return n; /* no more input; return number of missing bytes */
+ else {
+ z->n++; /* luaZ_fill consumed first byte; put it back */
+ z->p--;
+ }
+ }
+ m = (n <= z->n) ? n : z->n; /* min. between n and z->n */
+ memcpy(b, z->p, m);
+ z->n -= m;
+ z->p += m;
+ b = (char *)b + m;
+ n -= m;
+ }
+ return 0;
+}
+
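The reader/ZIO mechanism is visible from Lua through 'load' with a function argument: each value the function returns refills the buffer, and nil (or an empty string) ends the stream. A Lua 5.3 sketch:

    local pieces, i = {"return ", "1 + ", "2"}, 0
    local chunk = load(function() i = i + 1; return pieces[i] end)
    print(chunk())   --> 3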
diff --git a/lua/src/lzio.h b/lua/src/lzio.h
new file mode 100644
index 000000000..e7b6f34b1
--- /dev/null
+++ b/lua/src/lzio.h
@@ -0,0 +1,66 @@
+/*
+** $Id: lzio.h,v 1.31 2015/09/08 15:41:05 roberto Exp $
+** Buffered streams
+** See Copyright Notice in lua.h
+*/
+
+
+#ifndef lzio_h
+#define lzio_h
+
+#include "lua.h"
+
+#include "lmem.h"
+
+
+#define EOZ (-1) /* end of stream */
+
+typedef struct Zio ZIO;
+
+#define zgetc(z) (((z)->n--)>0 ? cast_uchar(*(z)->p++) : luaZ_fill(z))
+
+
+typedef struct Mbuffer {
+ char *buffer;
+ size_t n;
+ size_t buffsize;
+} Mbuffer;
+
+#define luaZ_initbuffer(L, buff) ((buff)->buffer = NULL, (buff)->buffsize = 0)
+
+#define luaZ_buffer(buff) ((buff)->buffer)
+#define luaZ_sizebuffer(buff) ((buff)->buffsize)
+#define luaZ_bufflen(buff) ((buff)->n)
+
+#define luaZ_buffremove(buff,i) ((buff)->n -= (i))
+#define luaZ_resetbuffer(buff) ((buff)->n = 0)
+
+
+#define luaZ_resizebuffer(L, buff, size) \
+ ((buff)->buffer = luaM_reallocvchar(L, (buff)->buffer, \
+ (buff)->buffsize, size), \
+ (buff)->buffsize = size)
+
+#define luaZ_freebuffer(L, buff) luaZ_resizebuffer(L, buff, 0)
+
+
+LUAI_FUNC void luaZ_init (lua_State *L, ZIO *z, lua_Reader reader,
+ void *data);
+LUAI_FUNC size_t luaZ_read (ZIO* z, void *b, size_t n); /* read next n bytes */
+
+
+
+/* --------- Private Part ------------------ */
+
+struct Zio {
+ size_t n; /* bytes still unread */
+ const char *p; /* current position in buffer */
+ lua_Reader reader; /* reader function */
+ void *data; /* additional data */
+ lua_State *L; /* Lua state (for reader) */
+};
+
+
+LUAI_FUNC int luaZ_fill (ZIO *z);
+
+#endif
diff --git a/src/Catalogue.cxx b/src/Catalogue.cxx
index c4f7a7eda..a9b268d37 100644
--- a/src/Catalogue.cxx
+++ b/src/Catalogue.cxx
@@ -131,6 +131,7 @@ int Scintilla_LinkLexers() {
LINK_LEXER(lmLiterateHaskell);
LINK_LEXER(lmLot);
LINK_LEXER(lmLout);
+ LINK_LEXER(lmLPeg);
LINK_LEXER(lmLua);
LINK_LEXER(lmMagikSF);
LINK_LEXER(lmMake);
diff --git a/win32/makefile b/win32/makefile
index 1af28c03f..f3b75d796 100644
--- a/win32/makefile
+++ b/win32/makefile
@@ -31,6 +31,7 @@ LEXLIB = Lexers.a
vpath %.h ../src ../include ../lexlib
vpath %.cxx ../src ../lexlib ../lexers
+vpath %.c ../lua/src
LDFLAGS=-shared -static -mwindows $(LDMINGW)
LIBS=-lgdi32 -luser32 -limm32 -lole32 -luuid -loleaut32 -lmsimg32 $(LIBSMINGW)
@@ -56,9 +57,22 @@ CXXFLAGS=-DNDEBUG -Os $(CXXBASEFLAGS)
STRIPFLAG=$(STRIPOPTION)
endif
+ifdef LPEG_LEXER
+CXXFLAGS+=-DLPEG_LEXER -I ../lua/src
+LUAOBJS:=lapi.o lcode.o lctype.o ldebug.o ldo.o ldump.o lfunc.o lgc.o linit.o \
+ llex.o lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o \
+ ltable.o ltm.o lundump.o lvm.o lzio.o \
+ lauxlib.o lbaselib.o lbitlib.o lcorolib.o ldblib.o liolib.o \
+ lmathlib.o loadlib.o loslib.o lstrlib.o ltablib.o lutf8lib.o \
+ lpcap.o lpcode.o lpprint.o lptree.o lpvm.o
+endif
+
.cxx.o:
$(CXX) $(CXXFLAGS) $(REFLAGS) -c $<
+.c.o:
+ $(CC) $(CFLAGS) -c $<
+
ALL: $(COMPONENT) $(LEXCOMPONENT) $(LEXLIB) ScintillaWinS.o
clean:
@@ -123,11 +137,12 @@ LOBJS = \
StyleContext.o \
WordList.o \
$(BASEOBJS) \
- $(LEXOBJS)
+ $(LEXOBJS) \
+ $(LUAOBJS)
$(LEXCOMPONENT): $(LOBJS) Scintilla.def
$(CXX) $(LDFLAGS) -o $@ $(STRIPFLAG) $(LOBJS) $(CXXFLAGS) $(LIBS)
-$(LEXLIB): $(LEXOBJS)
+$(LEXLIB): $(LEXOBJS) $(LUAOBJS)
$(AR) rc $@ $^
$(RANLIB) $@
diff --git a/win32/scintilla.mak b/win32/scintilla.mak
index c13eedc7d..aec630604 100644
--- a/win32/scintilla.mak
+++ b/win32/scintilla.mak
@@ -157,6 +157,7 @@ LEXOBJS=\
$(DIR_O)\LexLaTeX.obj \
$(DIR_O)\LexLisp.obj \
$(DIR_O)\LexLout.obj \
+ $(DIR_O)\LexLPeg.obj \
$(DIR_O)\LexLua.obj \
$(DIR_O)\LexMagik.obj \
$(DIR_O)\LexMake.obj \
@@ -607,6 +608,8 @@ $(DIR_O)\LexLisp.obj: ..\lexers\LexLisp.cxx $(LEX_HEADERS)
$(DIR_O)\LexLout.obj: ..\lexers\LexLout.cxx $(LEX_HEADERS)
+$(DIR_O)\LexLPeg.obj: ..\lexers\LexLPeg.cxx $(LEX_HEADERS)
+
$(DIR_O)\LexLua.obj: ..\lexers\LexLua.cxx $(LEX_HEADERS)
$(DIR_O)\LexMagik.obj: ..\lexers\LexMagik.cxx $(LEX_HEADERS)