path: root/lexlua/pike.lua
-- Copyright 2006-2020 Mitchell mitchell.att.foicica.com. See License.txt.
-- Pike LPeg lexer.

local lexer = require('lexer')
local token, word_match = lexer.token, lexer.word_match
local P, S = lpeg.P, lpeg.S
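-- Note: `lpeg` is assumed to be a global provided by the Scintillua/Textadept
-- host; standalone use would also need `local lpeg = require('lpeg')`.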

local lex = lexer.new('pike')

-- Whitespace.
lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))

-- Keywords.
lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
  break case catch continue default do else for foreach gauge if lambda return
  sscanf switch while import inherit
  -- Type modifiers.
  constant extern final inline local nomask optional private protected public
  static variant
]]))
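-- Note: the embedded '-- Type modifiers.' line above is documentation only;
-- word_match is expected to skip Lua-style comment lines in its word list.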

-- Types.
lex:add_rule('type', token(lexer.TYPE, word_match[[
  array class float function int mapping mixed multiset object program string
  void
]]))

-- Identifiers.
lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))

-- Strings.
local sq_str = lexer.range("'", true)
local dq_str = P('#')^-1 * lexer.range('"', true)
lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
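-- Note: the optional '#' prefix on dq_str is presumably for Pike's #"..."
-- literal string form; both ranges are single-line here, with lexer.range()'s
-- default backslash-escape handling.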

-- Comments.
local line_comment = lexer.to_eol('//', true)
local block_comment = lexer.range('/*', '*/', false, false, true)
lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
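-- Note: assuming the range(s, e, single_line, escapes, balanced) signature,
-- block comments span lines, do not treat '\' as an escape, and match nested
-- /* ... */ pairs as one balanced range.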

-- Numbers.
lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('lLdDfF')^-1))
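-- Note: the optional trailing S('lLdDfF') folds what are presumably numeric
-- type suffixes (l/L, d/D, f/F) into the NUMBER token.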

-- Preprocessors.
lex:add_rule('preprocessor', token(lexer.PREPROCESSOR,
  lexer.to_eol(lexer.starts_line('#'))))
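-- starts_line('#') matches '#' only at the beginning of a line, and to_eol()
-- then consumes the rest of that line as the directive.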

-- Operators.
lex:add_rule('operator', token(lexer.OPERATOR, S('<>=!+-/*%&|^~@`.,:;()[]{}')))

-- Fold points.
lex:add_fold_point(lexer.OPERATOR, '{', '}')
lex:add_fold_point(lexer.COMMENT, '/*', '*/')
lex:add_fold_point(lexer.COMMENT, lexer.fold_consecutive_lines('//'))
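-- fold_consecutive_lines('//') allows runs of adjacent '//' line comments to
-- fold as a single block, alongside the brace and block-comment fold points.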

return lex