author     mitchell <unknown>  2020-04-25 16:26:31 -0400
committer  mitchell <unknown>  2020-04-25 16:26:31 -0400
commit     fad15f79b1230b3076be515d6894c8919562809b (patch)
tree       72c848ef02c3331de5ca54eff7adaea3a9a6fb88 /lexlua/awk.lua
parent     1fd02a367dec125c0b49dd9246a0928433866b96 (diff)
download   scintilla-mirror-fad15f79b1230b3076be515d6894c8919562809b.tar.gz
Reformatted Lua LPeg lexers and added new convenience functions and a pattern.
`lexer.range()` replaces `lexer.delimited_range()` and `lexer.nested_pair()`. `lexer.to_eol()` replaces `patt * lexer.nonnewline^0` constructs. `lexer.number` replaces `lexer.float + lexer.integer`. Also added unit tests for lexer functions.
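For reference, a minimal sketch (not part of this commit; the rule names are illustrative) of how the replacements read in a typical Scintillua lexer rule:

-- Sketch only: old constructs and their replacements, assuming the usual
-- Scintillua lexer/token setup.
local lexer = require('lexer')
local token = lexer.token

-- Before: token(lexer.STRING, lexer.delimited_range('"'))
local string_rule = token(lexer.STRING, lexer.range('"'))

-- Before: token(lexer.COMMENT, '#' * lexer.nonnewline^0)
local comment_rule = token(lexer.COMMENT, lexer.to_eol('#'))

-- Before: token(lexer.NUMBER, lexer.float + lexer.integer)
local number_rule = token(lexer.NUMBER, lexer.number)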
Diffstat (limited to 'lexlua/awk.lua')
-rw-r--r--  lexlua/awk.lua  31
1 file changed, 15 insertions, 16 deletions
diff --git a/lexlua/awk.lua b/lexlua/awk.lua
index 36329acc3..40ff501e7 100644
--- a/lexlua/awk.lua
+++ b/lexlua/awk.lua
@@ -160,7 +160,7 @@ local function scanString(input, index)
return i + 1
elseif input:sub(i, i) == BACKSLASH then
i = i + 1
- -- lexer.delimited_range() doesn't handle CRLF.
+ -- lexer.range() doesn't handle CRLF.
if input:sub(i, i + 1) == CRLF then i = i + 1 end
end
i = i + 1
@@ -229,18 +229,17 @@ lex:add_rule('comment', token(lexer.COMMENT, '#' * P(scanComment)))
lex:add_rule('string', token(lexer.STRING, DQUOTE * P(scanString)))
-- No leading sign because it might be binary.
-local float = ((lexer.digit^1 * ('.' * lexer.digit^0)^-1) +
- ('.' * lexer.digit^1)) *
- (S('eE') * S('+-')^-1 * lexer.digit^1)^-1
+local float =
+ ((lexer.digit^1 * ('.' * lexer.digit^0)^-1) + ('.' * lexer.digit^1)) *
+ (S('eE') * S('+-')^-1 * lexer.digit^1)^-1
-- Fields. E.g. $1, $a, $(x), $a(x), $a[x], $"1", $$a, etc.
-lex:add_rule('field',
- token('field', P('$') * S('$+-')^0 *
- (float +
- lexer.word^0 * '(' * P(scanFieldDelimiters) +
- lexer.word^1 * ('[' * P(scanFieldDelimiters))^-1 +
- '"' * P(scanString) +
- '/' * P(eatRegex) * '/')))
+lex:add_rule('field', token('field', P('$') * S('$+-')^0 * (
+ float +
+ lexer.word^0 * '(' * P(scanFieldDelimiters) +
+ lexer.word^1 * ('[' * P(scanFieldDelimiters))^-1 +
+ '"' * P(scanString) +
+ '/' * P(eatRegex) * '/')))
lex:add_style('field', lexer.STYLE_LABEL)
-- Regular expressions.
@@ -250,18 +249,18 @@ lex:add_style('field', lexer.STYLE_LABEL)
-- sequences like '\S', '\s' have special meanings with Gawk. Tokens that
-- contain them are displayed differently.
lex:add_rule('gawkRegex', token('gawkRegex', SLASH * P(scanGawkRegex)))
-lex:add_style('gawkRegex', lexer.STYLE_PREPROCESSOR..',underlined')
+lex:add_style('gawkRegex', lexer.STYLE_PREPROCESSOR .. ',underlined')
lex:add_rule('regex', token(lexer.REGEX, SLASH * P(scanRegex)))
-- Operators.
lex:add_rule('gawkOperator', token('gawkOperator', P("|&") + "@" + "**=" +
- "**"))
-lex:add_style('gawkOperator', lexer.STYLE_OPERATOR..',underlined')
+ "**"))
+lex:add_style('gawkOperator', lexer.STYLE_OPERATOR .. ',underlined')
lex:add_rule('operator', token(lexer.OPERATOR, S('!%&()*+,-/:;<=>?[\\]^{|}~')))
-- Numbers.
lex:add_rule('gawkNumber', token('gawkNumber', lexer.hex_num + lexer.oct_num))
-lex:add_style('gawkNumber', lexer.STYLE_NUMBER..',underlined')
+lex:add_style('gawkNumber', lexer.STYLE_NUMBER .. ',underlined')
lex:add_rule('number', token(lexer.NUMBER, float))
-- Keywords.
@@ -282,7 +281,7 @@ lex:add_rule('gawkBuiltInVariable', token('gawkBuiltInVariable', word_match[[
ARGIND BINMODE ERRNO FIELDWIDTHS FPAT FUNCTAB IGNORECASE LINT PREC PROCINFO
ROUNDMODE RT SYMTAB TEXTDOMAIN
]]))
-lex:add_style('gawkBuiltInVariable', lexer.STYLE_CONSTANT..',underlined')
+lex:add_style('gawkBuiltInVariable', lexer.STYLE_CONSTANT .. ',underlined')
-- Functions.
lex:add_rule('function', token(lexer.FUNCTION, lexer.word * #P('(')))