diff options
author | mitchell <unknown> | 2018-03-11 23:04:41 -0400 |
---|---|---|
committer | mitchell <unknown> | 2018-03-11 23:04:41 -0400 |
commit | 519b7328b66c4c84f03893a31e4be5ba6b1395f2 (patch) | |
tree | 2055cd79006357e94c185f341d0df17b9a8769eb /lexlua/ps.lua | |
parent | c0373e036e965a70045971e2abc582cb4bf12a4e (diff) | |
download | scintilla-mirror-519b7328b66c4c84f03893a31e4be5ba6b1395f2.tar.gz |
Added optional Lua lexer support.
This support is disabled by default and must be enabled via compile-time option.
Diffstat (limited to 'lexlua/ps.lua')
-rw-r--r-- | lexlua/ps.lua | 47 |
1 file changed, 47 insertions, 0 deletions
-- Copyright 2006-2018 Mitchell mitchell.att.foicica.com. See License.txt.
-- PostScript LPeg lexer.
-- Defines token rules (keywords, functions, identifiers, strings, comments,
-- numbers, labels, operators) for the Scintillua lexer framework.

local lexer = require('lexer')
local token, word_match = lexer.token, lexer.word_match
-- NOTE(review): 'lpeg' is assumed to be provided globally by the lexer
-- framework — confirm against lexer.lua. Unused P/R locals removed.
local S = lpeg.S

local lex = lexer.new('ps')

-- Whitespace.
lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))

-- Keywords: stack, control, and boolean/null literals.
-- 'null' added (lowercase is the PostScript null object literal per the
-- PostScript Language Reference Manual); 'NULL' kept for compatibility.
lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
  pop exch dup copy roll clear count mark cleartomark counttomark exec if ifelse
  for repeat loop exit stop stopped countexecstack execstack quit start
  true false null NULL
]]))

-- Functions: arithmetic and math operators.
-- Fixed typo: 'ned' -> 'neg' (the PostScript negation operator).
lex:add_rule('function', token(lexer.FUNCTION, word_match[[
  add div idiv mod mul sub abs neg ceiling floor round truncate sqrt atan cos
  sin exp ln log rand srand rrand
]]))

-- Identifiers: a letter or '-' followed by alphanumerics or '-'.
local word = (lexer.alpha + '-') * (lexer.alnum + '-')^0
lex:add_rule('identifier', token(lexer.IDENTIFIER, word))

-- Strings: hex/base-85 strings in <...> and (...) strings with nesting.
local arrow_string = lexer.delimited_range('<>')
local nested_string = lexer.delimited_range('()', false, false, true)
lex:add_rule('string', token(lexer.STRING, arrow_string + nested_string))

-- Comments: '%' to end of line.
lex:add_rule('comment', token(lexer.COMMENT, '%' * lexer.nonnewline^0))

-- Numbers.
lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer))

-- Labels: literal names, e.g. /name.
lex:add_rule('label', token(lexer.LABEL, '/' * word))

-- Operators: array and procedure delimiters.
lex:add_rule('operator', token(lexer.OPERATOR, S('[]{}')))

return lex