aboutsummaryrefslogtreecommitdiffstats
path: root/lib/parsetools/test
diff options
context:
space:
mode:
authorarchimed <[email protected]>2014-07-23 09:57:13 +0400
committerarchimed <[email protected]>2014-07-24 09:44:27 +0400
commitc9bc5c944bf064727385d071e9331f521a2b1447 (patch)
tree2ad0c2f24b3e1c2823c6236b7badeb079ba51e11 /lib/parsetools/test
parent10414ad49c1dce62e1022c80c0ed6edd45abc20d (diff)
downloadotp-c9bc5c944bf064727385d071e9331f521a2b1447.tar.gz
otp-c9bc5c944bf064727385d071e9331f521a2b1447.tar.bz2
otp-c9bc5c944bf064727385d071e9331f521a2b1447.zip
Fix line counting in token and tokens functions
The line counter becomes invalid when rules with a line wrap are used. This issue appears because the parsing FSM does not roll back the line counter after attempting such a rule. Unit tests for 'token' and 'tokens' are also added.
Diffstat (limited to 'lib/parsetools/test')
-rw-r--r--lib/parsetools/test/leex_SUITE.erl17
1 file changed, 15 insertions, 2 deletions
diff --git a/lib/parsetools/test/leex_SUITE.erl b/lib/parsetools/test/leex_SUITE.erl
index 44a60aff88..6d2afe061e 100644
--- a/lib/parsetools/test/leex_SUITE.erl
+++ b/lib/parsetools/test/leex_SUITE.erl
@@ -888,8 +888,8 @@ Erlang code.
XrlFile = filename:join(Dir, "test_line_wrap.xrl"),
?line ok = file:write_file(XrlFile, Xrl),
ErlFile = filename:join(Dir, "test_line_wrap.erl"),
- ?line {ok, _} = leex:file(XrlFile, []),
- ?line {ok, _} = compile:file(ErlFile, [{outdir,Dir}]),
+ {ok, _} = leex:file(XrlFile, []),
+ {ok, _} = compile:file(ErlFile, [{outdir,Dir}]),
code:purge(test_line_wrap),
AbsFile = filename:rootname(ErlFile, ".erl"),
code:load_abs(AbsFile, test_line_wrap),
@@ -897,6 +897,19 @@ Erlang code.
S = "aaa\naaa",
{ok,[{second,1},{second,2}],2} = test_line_wrap:string(S)
end(),
+ fun() ->
+ S = "aaa\naaa",
+ {ok,[{second,3},{second,4}],4} = test_line_wrap:string(S, 3)
+ end(),
+ fun() ->
+ {done,{ok,{second,1},1},"\na"} = test_line_wrap:token([], "a\na"),
+ {more,Cont1} = test_line_wrap:token([], "\na"),
+ {done,{ok,{second,2},2},eof} = test_line_wrap:token(Cont1, eof)
+ end(),
+ fun() ->
+ {more,Cont1} = test_line_wrap:tokens([], "a\na"),
+ {done,{ok,[{second,1},{second,2}],2},eof} = test_line_wrap:tokens(Cont1, eof)
+ end(),
ok.
%% End of line_wrap