author     archimed <[email protected]>    2014-07-23 09:57:13 +0400
committer  archimed <[email protected]>    2014-07-24 09:44:27 +0400
commit     c9bc5c944bf064727385d071e9331f521a2b1447 (patch)
tree       2ad0c2f24b3e1c2823c6236b7badeb079ba51e11 /lib/parsetools
parent     10414ad49c1dce62e1022c80c0ed6edd45abc20d (diff)
Fix line counting in token and tokens functions
The line counter becomes invalid when rules that can wrap a line are
used. This happens because the parsing FSM does not roll back the line
counter after attempting such a rule and then falling back to a shorter
accepting match.

Unit tests for 'token' and 'tokens' are also added.
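For context, the failure mode needs a rule whose regexp can run past a
newline before the scanner gives up and falls back to a shorter accepting
rule. Below is a minimal sketch of such a scanner definition, assuming a
file name like test_line_wrap.xrl; it is illustrative only, not the exact
definition embedded in the test suite:

    Definitions.

    Rules.

    %% This rule can consume the newline while looking for a trailing '=',
    %% so the scanner advances its line counter past the '\n' and must roll
    %% it back when it falls back to the shorter rule below.
    [a]+[\n]*= : {token, {first, TokenLine}}.
    %% Shorter rule that actually accepts plain runs of 'a'.
    [a]+ : {token, {second, TokenLine}}.
    %% Whitespace, including the newline itself, is skipped.
    [\s\r\n]+ : skip_token.

    Erlang code.

Scanning "aaa\naaa" with rules like these attempts the first rule across
the newline, fails, and falls back to the second rule; before this fix the
failed attempt left the line counter one line too far ahead in the
re-entrant token/tokens interface.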
Diffstat (limited to 'lib/parsetools')
 lib/parsetools/include/leexinc.hrl |  8 ++++----
 lib/parsetools/test/leex_SUITE.erl | 17 +++++++++++++++--
 2 files changed, 19 insertions(+), 6 deletions(-)
diff --git a/lib/parsetools/include/leexinc.hrl b/lib/parsetools/include/leexinc.hrl
index 7a0d65e28e..938aef58f9 100644
--- a/lib/parsetools/include/leexinc.hrl
+++ b/lib/parsetools/include/leexinc.hrl
@@ -105,8 +105,8 @@ token(S0, Ics0, L0, Tcs, Tlen0, Tline, A0, Alen0) ->
         {reject,_Alen1,Tlen1,Ics1,L1,_S1} ->    % No token match
             Error = {Tline,?MODULE,{illegal,yypre(Tcs, Tlen1+1)}},
             {done,{error,Error,L1},Ics1};
-        {A1,Alen1,_Tlen1,_Ics1,L1,_S1} ->       % Use last accept match
-            token_cont(yysuf(Tcs, Alen1), L1, yyaction(A1, Alen1, Tcs, Tline))
+        {A1,Alen1,_Tlen1,_Ics1,_L1,_S1} ->      % Use last accept match
+            token_cont(yysuf(Tcs, Alen1), L0, yyaction(A1, Alen1, Tcs, Tline))
     end.
 
 %% token_cont(RestChars, Line, Token)
@@ -177,9 +177,9 @@ tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Ts, A0, Alen0) ->
             %% Skip rest of tokens.
             Error = {L1,?MODULE,{illegal,yypre(Tcs, Tlen1+1)}},
             skip_tokens(yysuf(Tcs, Tlen1+1), L1, Error);
-        {A1,Alen1,_Tlen1,_Ics1,L1,_S1} ->
+        {A1,Alen1,_Tlen1,_Ics1,_L1,_S1} ->
             Token = yyaction(A1, Alen1, Tcs, Tline),
-            tokens_cont(yysuf(Tcs, Alen1), L1, Token, Ts)
+            tokens_cont(yysuf(Tcs, Alen1), L0, Token, Ts)
     end.
 
 %% tokens_cont(RestChars, Line, Token, Tokens)
diff --git a/lib/parsetools/test/leex_SUITE.erl b/lib/parsetools/test/leex_SUITE.erl
index 44a60aff88..6d2afe061e 100644
--- a/lib/parsetools/test/leex_SUITE.erl
+++ b/lib/parsetools/test/leex_SUITE.erl
@@ -888,8 +888,8 @@ Erlang code.
     XrlFile = filename:join(Dir, "test_line_wrap.xrl"),
     ?line ok = file:write_file(XrlFile, Xrl),
     ErlFile = filename:join(Dir, "test_line_wrap.erl"),
-    ?line {ok, _} = leex:file(XrlFile, []),
-    ?line {ok, _} = compile:file(ErlFile, [{outdir,Dir}]),
+    {ok, _} = leex:file(XrlFile, []),
+    {ok, _} = compile:file(ErlFile, [{outdir,Dir}]),
     code:purge(test_line_wrap),
     AbsFile = filename:rootname(ErlFile, ".erl"),
     code:load_abs(AbsFile, test_line_wrap),
@@ -897,6 +897,19 @@ Erlang code.
         S = "aaa\naaa",
         {ok,[{second,1},{second,2}],2} = test_line_wrap:string(S)
     end(),
+    fun() ->
+        S = "aaa\naaa",
+        {ok,[{second,3},{second,4}],4} = test_line_wrap:string(S, 3)
+    end(),
+    fun() ->
+        {done,{ok,{second,1},1},"\na"} = test_line_wrap:token([], "a\na"),
+        {more,Cont1} = test_line_wrap:token([], "\na"),
+        {done,{ok,{second,2},2},eof} = test_line_wrap:token(Cont1, eof)
+    end(),
+    fun() ->
+        {more,Cont1} = test_line_wrap:tokens([], "a\na"),
+        {done,{ok,[{second,1},{second,2}],2},eof} = test_line_wrap:tokens(Cont1, eof)
+    end(),
     ok.
 
 %% End of line_wrap
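With the leexinc.hrl change, the re-entrant interface reports the line of
the characters it actually consumed rather than the line reached during a
failed longer match. A short usage sketch follows, assuming a scanner
module named test_line_wrap generated from rules like the ones above; the
{more,Cont} and {done,Result,Rest} return shapes are the documented
interface of leex-generated scanners, while the function itself is
illustrative and not part of the suite:

    %% Illustrative only: exercises the generated token/2 and tokens/2.
    check_line_counting() ->
        %% "a" is consumed on line 1; the unconsumed "\na" must not bump
        %% the end-line count (before the fix this reported line 2).
        {done,{ok,{second,1},1},"\na"} = test_line_wrap:token([], "a\na"),
        %% Feeding the rest through a continuation now counts the newline
        %% exactly once, so the second token ends on line 2, not 3.
        {more,Cont} = test_line_wrap:tokens([], "a\na"),
        {done,{ok,[{second,1},{second,2}],2},eof} =
            test_line_wrap:tokens(Cont, eof),
        ok.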