diff options
| author | mitchell <unknown> | 2020-07-13 19:40:56 -0400 |
|---|---|---|
| committer | mitchell <unknown> | 2020-07-13 19:40:56 -0400 |
| commit | b76968a8e29f7851cbda33523db79da12a962b95 (patch) | |
| tree | 940cc59f09d6a53de1317946b9930dda51ce4ba5 /lexlua | |
| parent | c8415f6bb194477fcdd8a9f0c6f8551f1749e6b6 (diff) | |
| download | scintilla-mirror-b76968a8e29f7851cbda33523db79da12a962b95.tar.gz | |
lexlua: Deprecated some unused lexer patterns.
Diffstat (limited to 'lexlua')
| -rw-r--r-- | lexlua/html.lua | 2 |
| -rw-r--r-- | lexlua/ledger.lua | 2 |
| -rw-r--r-- | lexlua/lexer.lua | 34 |
| -rw-r--r-- | lexlua/prolog.lua | 2 |
4 files changed, 19 insertions(+), 21 deletions(-)
diff --git a/lexlua/html.lua b/lexlua/html.lua
index 787566949..416fc69cd 100644
--- a/lexlua/html.lua
+++ b/lexlua/html.lua
@@ -134,7 +134,7 @@ local js_start_rule = #('<' * script_element * ('>' + P(function(input, index)
   end
 end))) * lex.embed_start_tag
 local js_end_rule = #('</' * script_element * ws^-1 * '>') * lex.embed_end_tag
-local js_line_comment = '//' * (lexer.nonnewline_esc - js_end_rule)^0
+local js_line_comment = '//' * (lexer.nonnewline - js_end_rule)^0
 local js_block_comment = '/*' * (lexer.any - '*/' - js_end_rule)^0 * P('*/')^-1
 js:modify_rule('comment', token(lexer.COMMENT, js_line_comment +
   js_block_comment))
diff --git a/lexlua/ledger.lua b/lexlua/ledger.lua
index 2daaab46a..b6751ec27 100644
--- a/lexlua/ledger.lua
+++ b/lexlua/ledger.lua
@@ -11,7 +11,7 @@ local delim = P('\t') + P(' ')
 
 -- Account.
 lex:add_rule('account', token(lexer.VARIABLE, lexer.starts_line(S(' \t')^1 *
-  (lexer.print - delim)^1)))
+  lexer.graph^1)))
 
 -- Amount.
 lex:add_rule('amount', token(lexer.NUMBER, delim * (1 - S(';\r\n'))^1))
diff --git a/lexlua/lexer.lua b/lexlua/lexer.lua
index 3f24dd79c..f65fc15d4 100644
--- a/lexlua/lexer.lua
+++ b/lexlua/lexer.lua
@@ -133,16 +133,15 @@ local M = {}
 -- [`lexer.PREPROCESSOR`](), [`lexer.CONSTANT`](), [`lexer.VARIABLE`](),
 -- [`lexer.FUNCTION`](), [`lexer.CLASS`](), [`lexer.TYPE`](), [`lexer.LABEL`](),
 -- [`lexer.REGEX`](), and [`lexer.EMBEDDED`](). Patterns include
--- [`lexer.any`](), [`lexer.ascii`](), [`lexer.extend`](), [`lexer.alpha`](),
--- [`lexer.digit`](), [`lexer.alnum`](), [`lexer.lower`](), [`lexer.upper`](),
--- [`lexer.xdigit`](), [`lexer.cntrl`](), [`lexer.graph`](), [`lexer.print`](),
--- [`lexer.punct`](), [`lexer.space`](), [`lexer.newline`](),
--- [`lexer.nonnewline`](), [`lexer.nonnewline_esc`](), [`lexer.dec_num`](),
--- [`lexer.hex_num`](), [`lexer.oct_num`](), [`lexer.integer`](),
--- [`lexer.float`](), [`lexer.number`](), and [`lexer.word`](). You may use your
--- own token names if none of the above fit your language, but an advantage to
--- using predefined token names is that your lexer's tokens will inherit the
--- universal syntax highlighting color theme used by your text editor.
+-- [`lexer.any`](), [`lexer.alpha`](), [`lexer.digit`](), [`lexer.alnum`](),
+-- [`lexer.lower`](), [`lexer.upper`](), [`lexer.xdigit`](), [`lexer.graph`](),
+-- [`lexer.print`](), [`lexer.punct`](), [`lexer.space`](), [`lexer.newline`](),
+-- [`lexer.nonnewline`](), [`lexer.dec_num`](), [`lexer.hex_num`](),
+-- [`lexer.oct_num`](), [`lexer.integer`](), [`lexer.float`](),
+-- [`lexer.number`](), and [`lexer.word`](). You may use your own token names if
+-- none of the above fit your language, but an advantage to using predefined
+-- token names is that your lexer's tokens will inherit the universal syntax
+-- highlighting color theme used by your text editor.
 --
 -- ##### Example Tokens
 --
@@ -798,9 +797,6 @@ local M = {}
 -- A pattern that matches a sequence of end of line characters.
 -- @field nonnewline (pattern)
 -- A pattern that matches any single, non-newline character.
--- @field nonnewline_esc (pattern)
--- A pattern that matches any single, non-newline character or any set of end
--- of line characters escaped with '\'.
 -- @field dec_num (pattern)
 -- A pattern that matches a decimal number.
 -- @field hex_num (pattern)
@@ -1590,23 +1586,18 @@ end
 
 -- Common patterns.
 M.any = lpeg_P(1)
-M.ascii = lpeg_R('\000\127')
-M.extend = lpeg_R('\000\255')
 M.alpha = lpeg_R('AZ', 'az')
 M.digit = lpeg_R('09')
 M.alnum = lpeg_R('AZ', 'az', '09')
 M.lower = lpeg_R('az')
 M.upper = lpeg_R('AZ')
 M.xdigit = lpeg_R('09', 'AF', 'af')
-M.cntrl = lpeg_R('\000\031')
 M.graph = lpeg_R('!~')
-M.print = lpeg_R(' ~')
 M.punct = lpeg_R('!/', ':@', '[\'', '{~')
 M.space = lpeg_S('\t\v\f\n\r ')
 
 M.newline = lpeg_P('\r')^-1 * '\n'
 M.nonnewline = 1 - M.newline
-M.nonnewline_esc = 1 - (M.newline + '\\') + '\\' * M.any
 
 M.dec_num = M.digit^1
 M.hex_num = '0' * lpeg_S('xX') * M.xdigit^1
@@ -1620,6 +1611,13 @@ M.number = M.float + M.integer
 
 M.word = (M.alpha + '_') * (M.alnum + '_')^0
 
+-- Deprecated.
+M.nonnewline_esc = 1 - (M.newline + '\\') + '\\' * M.any
+M.ascii = lpeg_R('\000\127')
+M.extend = lpeg_R('\000\255')
+M.cntrl = lpeg_R('\000\031')
+M.print = lpeg_R(' ~')
+
 ---
 -- Creates and returns a token pattern with token name *name* and pattern
 -- *patt*.
diff --git a/lexlua/prolog.lua b/lexlua/prolog.lua
index c65748311..d12b5709c 100644
--- a/lexlua/prolog.lua
+++ b/lexlua/prolog.lua
@@ -308,7 +308,7 @@ lex:add_rule('bif', token(lexer.FUNCTION, word_match(bifs[dialect]) * #(P'(')))
 -- Numbers.
 local decimal_group = S('+-')^-1 * (lexer.digit + '_')^1
 local binary_number = '0b' * (S('01') + '_')^1
-local character_code = '0\'' * S('\\')^-1 * (lexer.print - lexer.space)
+local character_code = '0\'' * S('\\')^-1 * lexer.graph
 local decimal_number = decimal_group * ('.' * decimal_group)^-1 *
   ('e' * decimal_group)^-1
 local hexadecimal_number = '0x' * (lexer.xdigit + '_')^1
