path: root/lexlua/prolog.lua
Diffstat (limited to 'lexlua/prolog.lua')
-rw-r--r--  lexlua/prolog.lua | 38
1 file changed, 18 insertions(+), 20 deletions(-)
diff --git a/lexlua/prolog.lua b/lexlua/prolog.lua
index 79a3fbe10..c65748311 100644
--- a/lexlua/prolog.lua
+++ b/lexlua/prolog.lua
@@ -31,7 +31,7 @@ local P, R, S, B, V, C = lpeg.P, lpeg.R, lpeg.S, lpeg.B, lpeg.V, lpeg.C
local lex = lexer.new('prolog')
local dialects = setmetatable({gprolog = 'gprolog', swipl = 'swipl'},
- {__index = function(_, _) return 'iso' end})
+ {__index = function(_, _) return 'iso' end})
local dialect = dialects[lexer.property['prolog.dialect']]
-- Directives.
@@ -71,10 +71,10 @@ directives.swipl = directives.iso .. [[
module multifile op reexport thread_local use_module volatile
]]
lex:add_rule('directive',
- token(lexer.WHITESPACE, lexer.starts_line(S(' \t'))^0) *
- token(lexer.OPERATOR, P':-') *
- token(lexer.WHITESPACE, S(' \t')^0) *
- token(lexer.PREPROCESSOR, P(word_match(directives[dialect]))))
+ token(lexer.WHITESPACE, lexer.starts_line(S(' \t'))^0) *
+ token(lexer.OPERATOR, P':-') *
+ token(lexer.WHITESPACE, S(' \t')^0) *
+ token(lexer.PREPROCESSOR, P(word_match(directives[dialect]))))
-- Whitespace.
lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
@@ -278,10 +278,9 @@ one_plus_arity_keywords.swipl = [[
set_random prolog_stack_property put_char unload_file nb_setval put_byte
current_signal put_code write_length string read_string text_to_string
]]
-lex:add_rule('keyword',
- token(lexer.KEYWORD, word_match(zero_arity_keywords[dialect]) +
- (word_match(one_plus_arity_keywords[dialect]) *
- #(P'('))))
+lex:add_rule('keyword', token(lexer.KEYWORD,
+ word_match(zero_arity_keywords[dialect]) +
+ (word_match(one_plus_arity_keywords[dialect]) * #(P'('))))
-- BIFs.
local bifs = {}
@@ -311,16 +310,15 @@ local decimal_group = S('+-')^-1 * (lexer.digit + '_')^1
local binary_number = '0b' * (S('01') + '_')^1
local character_code = '0\'' * S('\\')^-1 * (lexer.print - lexer.space)
local decimal_number = decimal_group * ('.' * decimal_group)^-1 *
- ('e' * decimal_group)^-1
+ ('e' * decimal_group)^-1
local hexadecimal_number = '0x' * (lexer.xdigit + '_')^1
local octal_number = '0o' * (S('01234567') + '_')^1
lex:add_rule('number', token(lexer.NUMBER, character_code + binary_number +
- hexadecimal_number + octal_number +
- decimal_number))
+ hexadecimal_number + octal_number + decimal_number))
-- Comments.
-local line_comment = '%' * lexer.nonnewline^0
-local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+local line_comment = lexer.to_eol('%')
+local block_comment = lexer.range('/*', '*/')
lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
-- Operators.
@@ -338,18 +336,18 @@ operators.swipl = [[
initialization rem
]]
lex:add_rule('operator', token(lexer.OPERATOR, word_match(operators[dialect]) +
- S('-!+\\|=:;&<>()[]{}/*^@?.')))
+ S('-!+\\|=:;&<>()[]{}/*^@?.')))
-- Variables.
-lex:add_rule('variable',
- token(lexer.VARIABLE, (lexer.upper + '_') *
- (lexer.word^1 + lexer.digit^1 + P('_')^1)^0))
+lex:add_rule('variable', token(lexer.VARIABLE, (lexer.upper + '_') *
+ (lexer.word^1 + lexer.digit^1 + P('_')^1)^0))
-- Identifiers.
lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
-- Strings.
-lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
- lexer.delimited_range('"', true)))
+local sq_str = lexer.range("'", true)
+local dq_str = lexer.range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
return lex
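
Note: the rewritten comment and string rules above replace the older helpers lexer.nonnewline and lexer.delimited_range() with the newer lexer.to_eol() and lexer.range() from the Scintillua lexer API. The following is a minimal, illustrative sketch (not part of this commit) of a standalone lexer built with the same helpers; it assumes the Scintillua lexer module is available via require('lexer') and that token/add_rule behave as used in prolog.lua.

-- example.lua: hedged sketch of the to_eol()/range() pattern seen in this diff.
local lexer = require('lexer')
local token = lexer.token

local lex = lexer.new('example')

-- Line comments: '%' through end of line, mirroring the Prolog rule.
local line_comment = lexer.to_eol('%')
-- Block comments: '/*' ... '*/'; an unterminated range matches to end of input.
local block_comment = lexer.range('/*', '*/')
lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))

-- Single-line quoted strings; the boolean argument keeps the range on one line.
local sq_str = lexer.range("'", true)
local dq_str = lexer.range('"', true)
lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))

return lex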