##
# Tokenizes +input+ and appends the resulting tokens to @tokens.
#
# Each token is an Array of the form <tt>[type, value, *token_pos(pos)]</tt>,
# where token_pos is a sibling method (not visible in this chunk) that
# presumably maps a scanner byte offset to a column/line pair using @line and
# @line_pos — TODO confirm.  NOTE(review): assumes @tokens was initialized by
# the caller before this method runs.
#
# Returns self so calls can be chained.

def tokenize input
  s = StringScanner.new input
  @line = 0
  @line_pos = 0

  until s.eos? do
    pos = s.pos

    # Skip runs of spaces; the next iteration re-captures pos, so every
    # token starts at its first non-space byte.
    next if s.scan(/ +/)

    @tokens << case
               when s.scan(/\r?\n/) then
                 token = [:NEWLINE, s.matched, *token_pos(pos)]
                 @line_pos = s.pos
                 @line += 1
                 token
               when s.scan(/(=+)(\s*)/) then
                 level = s[1].length
                 header = [:HEADER, level, *token_pos(pos)]

                 if s[2] =~ /^\r?\n/ then
                   # Bare "=" line: push the swallowed newline back so the
                   # NEWLINE branch sees it on the next pass.
                   s.pos -= s[2].length
                   header
                 else
                   # "= title": emit the HEADER now, then the heading text
                   # as a TEXT token (trailing CR stripped).
                   pos = s.pos
                   s.scan(/.*/)
                   @tokens << header
                   [:TEXT, s.matched.sub(/\r$/, ''), *token_pos(pos)]
                 end
               when s.scan(/(-{3,}) *$/) then
                 # Horizontal rule; weight is the dash count beyond "--".
                 [:RULE, s[1].length - 2, *token_pos(pos)]
               when s.scan(/([*-]) +(\S)/) then
                 # Unordered-list bullet.  Unget the first non-space byte of
                 # the item body so it is re-tokenized as content.
                 s.pos -= s[2].bytesize
                 [:BULLET, s[1], *token_pos(pos)]
               when s.scan(/([a-z]|\d+)\. +(\S)/i) then
                 # Ordered-list label.  The /i flag is required here:
                 # without it s[1] could only be a lowercase letter or
                 # digits, making the :UALPHA branch below (and the
                 # ParseError fallback) unreachable, and uppercase labels
                 # like "A." would fall through to plain TEXT.
                 list_label = s[1]
                 s.pos -= s[2].bytesize # unget the item body's first byte

                 list_type =
                   case list_label
                   when /[a-z]/ then :LALPHA
                   when /[A-Z]/ then :UALPHA
                   when /\d/    then :NUMBER
                   else
                     raise ParseError, "BUG token #{list_label}"
                   end

                 [list_type, list_label, *token_pos(pos)]
               when s.scan(/\[(.*?)\]( +|$)/) then
                 # Labeled-list label: "[label] ..."
                 [:LABEL, s[1], *token_pos(pos)]
               when s.scan(/(.*?)::( +|$)/) then
                 # Note-list label: "label:: ..."
                 [:NOTE, s[1], *token_pos(pos)]
               else
                 # Anything else: the remainder of the line is plain text
                 # (trailing CR stripped).
                 s.scan(/.*/)
                 [:TEXT, s.matched.sub(/\r$/, ''), *token_pos(pos)]
               end
  end

  self
end