Diffstat (limited to 'lexlua/markdown.lua')
 -rw-r--r--  lexlua/markdown.lua  131
 1 file changed, 55 insertions, 76 deletions
diff --git a/lexlua/markdown.lua b/lexlua/markdown.lua
index e4bba29a0..cac5c8322 100644
--- a/lexlua/markdown.lua
+++ b/lexlua/markdown.lua
@@ -8,58 +8,40 @@ local P, R, S = lpeg.P, lpeg.R, lpeg.S
local lex = lexer.new('markdown')
-- Block elements.
-lex:add_rule('header',
- token('h6', lexer.starts_line('######') * lexer.nonnewline^0) +
- token('h5', lexer.starts_line('#####') * lexer.nonnewline^0) +
- token('h4', lexer.starts_line('####') * lexer.nonnewline^0) +
- token('h3', lexer.starts_line('###') * lexer.nonnewline^0) +
- token('h2', lexer.starts_line('##') * lexer.nonnewline^0) +
- token('h1', lexer.starts_line('#') * lexer.nonnewline^0))
-local font_size = lexer.property_int['fontsize'] > 0 and
- lexer.property_int['fontsize'] or 10
-local hstyle = 'fore:$(color.red)'
-lex:add_style('h6', hstyle)
-lex:add_style('h5', hstyle..',size:'..(font_size + 1))
-lex:add_style('h4', hstyle..',size:'..(font_size + 2))
-lex:add_style('h3', hstyle..',size:'..(font_size + 3))
-lex:add_style('h2', hstyle..',size:'..(font_size + 4))
-lex:add_style('h1', hstyle..',size:'..(font_size + 5))
-
-lex:add_rule('blockquote',
- token(lexer.STRING,
- lpeg.Cmt(lexer.starts_line(S(' \t')^0 * '>'),
- function(input, index)
- local _, e = input:find('\n[ \t]*\r?\n', index)
- return (e or #input) + 1
- end)))
-
-lex:add_rule('list', token('list', lexer.starts_line(S(' \t')^0 * (S('*+-') +
- R('09')^1 * '.')) *
- S(' \t')))
+local function h(n)
+ return token('h' .. n, lexer.to_eol(lexer.starts_line(string.rep('#', n))))
+end
+lex:add_rule('header', h(6) + h(5) + h(4) + h(3) + h(2) + h(1))
+local function add_header_style(n)
+ local font_size = lexer.property_int['fontsize'] > 0 and
+ lexer.property_int['fontsize'] or 10
+ lex:add_style('h' .. n, 'fore:$(color.red),size:' .. (font_size + (6 - n)))
+end
+for i = 1, 6 do add_header_style(i) end
+
+lex:add_rule('blockquote', token(lexer.STRING,
+ lpeg.Cmt(lexer.starts_line(S(' \t')^0 * '>'), function(input, index)
+ local _, e = input:find('\n[ \t]*\r?\n', index)
+ return (e or #input) + 1
+ end)))
+
+lex:add_rule('list', token('list',
+ lexer.starts_line(S(' \t')^0 * (S('*+-') + R('09')^1 * '.')) * S(' \t')))
lex:add_style('list', lexer.STYLE_CONSTANT)
-lex:add_rule('block_code',
- token('code', lexer.starts_line(P(' ')^4 + P('\t')) * -P('<') *
- lexer.nonnewline^0 * lexer.newline^-1) +
- token('code', lexer.starts_line(P('```')) * (lexer.any - '```')^0 *
- P('```')^-1))
-lex:add_rule('inline_code',
- token('code', P('``') * (lexer.any - '``')^0 * P('``')^-1 +
- lexer.delimited_range('`', false, true)))
-lex:add_style('code', lexer.STYLE_EMBEDDED..',eolfilled')
-
-lex:add_rule('hr',
- token('hr',
- lpeg.Cmt(lexer.starts_line(S(' \t')^0 * lpeg.C(S('*-_'))),
- function(input, index, c)
- local line = input:match('[^\r\n]*', index)
- line = line:gsub('[ \t]', '')
- if line:find('[^'..c..']') or #line < 2 then
- return nil
- end
- return (select(2, input:find('\r?\n', index)) or
- #input) + 1
- end)))
+local code_line = lexer.to_eol(lexer.starts_line(P(' ')^4 + '\t') * -P('<')) *
+ lexer.newline^-1
+local code_block = lexer.range(lexer.starts_line('```'), '```')
+local code_inline = lexer.range('``') + lexer.range('`', false, false)
+lex:add_rule('block_code', token('code', code_line + code_block + code_inline))
+lex:add_style('code', lexer.STYLE_EMBEDDED .. ',eolfilled')
+
+lex:add_rule('hr', token('hr', lpeg.Cmt(
+ lexer.starts_line(S(' \t')^0 * lpeg.C(S('*-_'))), function(input, index, c)
+ local line = input:match('[^\r\n]*', index):gsub('[ \t]', '')
+ if line:find('[^' .. c .. ']') or #line < 2 then return nil end
+ return (select(2, input:find('\r?\n', index)) or #input) + 1
+ end)))
lex:add_style('hr', 'back:$(color.black),eolfilled')
-- Whitespace.
@@ -69,23 +51,22 @@ lex:add_rule('whitespace', ws)
-- Span elements.
lex:add_rule('escape', token(lexer.DEFAULT, P('\\') * 1))
-lex:add_rule('link_label',
- token('link_label', lexer.delimited_range('[]') * ':') * ws *
- token('link_url', (lexer.any - lexer.space)^1) *
- (ws * token(lexer.STRING, lexer.delimited_range('"', false, true) +
- lexer.delimited_range("'", false, true) +
- lexer.delimited_range('()')))^-1)
+local ref_link_label = token('link_label', lexer.range('[', ']', true) * ':')
+local ref_link_url = token('link_url', (lexer.any - lexer.space)^1)
+local ref_link_title = token(lexer.STRING, lexer.range('"', true, false) +
+ lexer.range("'", true, false) + lexer.range('(', ')', true))
+lex:add_rule('link_label', ref_link_label * ws * ref_link_url *
+ (ws * ref_link_title)^-1)
lex:add_style('link_label', lexer.STYLE_LABEL)
lex:add_style('link_url', 'underlined')
-lex:add_rule('link',
- token('link', P('!')^-1 * lexer.delimited_range('[]') *
- (P('(') * (lexer.any - S(') \t'))^0 *
- (S(' \t')^1 *
- lexer.delimited_range('"', false, true))^-1 * ')' +
- S(' \t')^0 * lexer.delimited_range('[]')) +
- 'http' * P('s')^-1 * '://' *
- (lexer.any - lexer.space)^1))
+local link_label = P('!')^-1 * lexer.range('[', ']', true)
+local link_target = P('(') * (lexer.any - S(') \t'))^0 *
+ (S(' \t')^1 * lexer.range('"', false, false))^-1 * ')'
+local link_ref = S(' \t')^0 * lexer.range('[', ']', true)
+local link_url = 'http' * P('s')^-1 * '://' * (lexer.any - lexer.space)^1
+lex:add_rule('link', token('link', link_label * (link_target + link_ref) +
+ link_url))
lex:add_style('link', 'underlined')
local punct_space = lexer.punct + lexer.space
@@ -96,29 +77,27 @@ local punct_space = lexer.punct + lexer.space
local function flanked_range(s, not_inword)
local fl_char = lexer.any - s - lexer.space
local left_fl = lpeg.B(punct_space - s) * s * #fl_char +
- s * #(fl_char - lexer.punct)
+ s * #(fl_char - lexer.punct)
local right_fl = lpeg.B(lexer.punct) * s * #(punct_space - s) +
- lpeg.B(fl_char) * s
+ lpeg.B(fl_char) * s
return left_fl * (lexer.any - (not_inword and s * #punct_space or s))^0 *
- right_fl
+ right_fl
end
-lex:add_rule('strong',
- token('strong', flanked_range('**') +
- (lpeg.B(punct_space) + #lexer.starts_line('_')) *
- flanked_range('__', true) * #(punct_space + -1)))
+lex:add_rule('strong', token('strong', flanked_range('**') +
+ (lpeg.B(punct_space) + #lexer.starts_line('_')) * flanked_range('__', true) *
+ #(punct_space + -1)))
lex:add_style('strong', 'bold')
-lex:add_rule('em',
- token('em', flanked_range('*') +
- (lpeg.B(punct_space) + #lexer.starts_line('_')) *
- flanked_range('_', true) * #(punct_space + -1)))
+lex:add_rule('em', token('em', flanked_range('*') +
+ (lpeg.B(punct_space) + #lexer.starts_line('_')) * flanked_range('_', true) *
+ #(punct_space + -1)))
lex:add_style('em', 'italics')
-- Embedded HTML.
local html = lexer.load('html')
local start_rule = lexer.starts_line(S(' \t')^0) * #P('<') *
- html:get_rule('element')
+ html:get_rule('element')
local end_rule = token(lexer.DEFAULT, P('\n')) -- TODO: lexer.WHITESPACE errors
lex:embed(html, start_rule, end_rule)