author     Bruce Yinhe <[email protected]>    2014-10-13 15:10:02 +0200
committer  Bruce Yinhe <[email protected]>    2014-10-13 15:10:02 +0200
commit     217602ba670d458ab289b3960a6ca9441a5ae189 (patch)
tree       dbeeaf0f1c30ba5891bc02364ea2445c1fa3598d /lib
parent     54a6b141cba05d89c5c376e40a403f217db6c46b (diff)
parent     c9bc5c944bf064727385d071e9331f521a2b1447 (diff)
Merge branch 'archimed-shaman/leex_line_wrap_issue' into maint
OTP-12238

* archimed-shaman/leex_line_wrap_issue:
  Fix line counting in token and tokens functions
  Add unit test for string fun in leex test suite
  Fix the line counting in string function
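For context: before this fix, newlines that the scanner had read ahead past while
trying to find a longer match were counted once during the look-ahead and again when
the pushed-back input was rescanned, so reported token lines drifted upwards. A minimal
illustration, lifted from the new test case in the diff below (the scanner module
test_line_wrap is generated from the rules shown in that test; this is a sketch of the
expected behaviour, not additional test code):

    %% "aaa\naaa" contains one newline, so the second token is on line 2
    %% and scanning ends on line 2.
    {ok, [{second,1}, {second,2}], 2} = test_line_wrap:string("aaa\naaa"),
    %% Starting the line count at 3 shifts everything by the same offset.
    {ok, [{second,3}, {second,4}], 4} = test_line_wrap:string("aaa\naaa", 3).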
Diffstat (limited to 'lib')
-rw-r--r--   lib/parsetools/include/leexinc.hrl   12
-rw-r--r--   lib/parsetools/test/leex_SUITE.erl   51
2 files changed, 53 insertions(+), 10 deletions(-)
diff --git a/lib/parsetools/include/leexinc.hrl b/lib/parsetools/include/leexinc.hrl
index dbbb688d2d..938aef58f9 100644
--- a/lib/parsetools/include/leexinc.hrl
+++ b/lib/parsetools/include/leexinc.hrl
@@ -36,8 +36,8 @@ string(Ics0, L0, Tcs, Ts) ->
string_cont(Ics1, L1, yyaction(A, Alen, Tcs, L0), Ts);
{reject,_Alen,Tlen,_Ics1,L1,_S1} -> % After a non-accepting state
{error,{L0,?MODULE,{illegal,yypre(Tcs, Tlen+1)}},L1};
- {A,Alen,_Tlen,_Ics1,L1,_S1} ->
- string_cont(yysuf(Tcs, Alen), L1, yyaction(A, Alen, Tcs, L0), Ts)
+ {A,Alen,_Tlen,_Ics1,_L1,_S1} ->
+ string_cont(yysuf(Tcs, Alen), L0, yyaction(A, Alen, Tcs, L0), Ts)
end.
%% string_cont(RestChars, Line, Token, Tokens)
@@ -105,8 +105,8 @@ token(S0, Ics0, L0, Tcs, Tlen0, Tline, A0, Alen0) ->
{reject,_Alen1,Tlen1,Ics1,L1,_S1} -> % No token match
Error = {Tline,?MODULE,{illegal,yypre(Tcs, Tlen1+1)}},
{done,{error,Error,L1},Ics1};
- {A1,Alen1,_Tlen1,_Ics1,L1,_S1} -> % Use last accept match
- token_cont(yysuf(Tcs, Alen1), L1, yyaction(A1, Alen1, Tcs, Tline))
+ {A1,Alen1,_Tlen1,_Ics1,_L1,_S1} -> % Use last accept match
+ token_cont(yysuf(Tcs, Alen1), L0, yyaction(A1, Alen1, Tcs, Tline))
end.
%% token_cont(RestChars, Line, Token)
@@ -177,9 +177,9 @@ tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Ts, A0, Alen0) ->
%% Skip rest of tokens.
Error = {L1,?MODULE,{illegal,yypre(Tcs, Tlen1+1)}},
skip_tokens(yysuf(Tcs, Tlen1+1), L1, Error);
- {A1,Alen1,_Tlen1,_Ics1,L1,_S1} ->
+ {A1,Alen1,_Tlen1,_Ics1,_L1,_S1} ->
Token = yyaction(A1, Alen1, Tcs, Tline),
- tokens_cont(yysuf(Tcs, Alen1), L1, Token, Ts)
+ tokens_cont(yysuf(Tcs, Alen1), L0, Token, Ts)
end.
%% tokens_cont(RestChars, Line, Token, Tokens)
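A reading of the change above, not stated in the commit message itself: in each of
these clauses the scanner has already read past the accepted match while looking for a
longer one, and the unconsumed suffix, yysuf(Tcs, Alen), is handed back to the
continuation to be rescanned. Continuing with the look-ahead line count L1 therefore
counted any newlines in that suffix twice; continuing with the line count from the
start of the token, L0, lets the rescan count them exactly once. The incremental entry
points then agree with string/1, as the new test case below asserts:

    {done, {ok,{second,1},1}, "\na"} = test_line_wrap:token([], "a\na"),
    {more, Cont1} = test_line_wrap:token([], "\na"),
    {done, {ok,{second,2},2}, eof} = test_line_wrap:token(Cont1, eof).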
diff --git a/lib/parsetools/test/leex_SUITE.erl b/lib/parsetools/test/leex_SUITE.erl
index eb15bebf63..6d2afe061e 100644
--- a/lib/parsetools/test/leex_SUITE.erl
+++ b/lib/parsetools/test/leex_SUITE.erl
@@ -43,8 +43,8 @@
file/1, compile/1, syntax/1,
pt/1, man/1, ex/1, ex2/1, not_yet/1,
-
- otp_10302/1, otp_11286/1, unicode/1]).
+ line_wrap/1,
+ otp_10302/1, otp_11286/1, unicode/1]).
% Default timetrap timeout (set in init_per_testcase).
-define(default_timeout, ?t:minutes(1)).
@@ -61,12 +61,13 @@ end_per_testcase(_Case, Config) ->
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [{group, checks}, {group, examples}].
+ [{group, checks}, {group, examples}, {group, bugs}].
groups() ->
[{checks, [], [file, compile, syntax]},
{examples, [], [pt, man, ex, ex2, not_yet, unicode]},
- {tickets, [], [otp_10302, otp_11286]}].
+ {tickets, [], [otp_10302, otp_11286]},
+ {bugs, [], [line_wrap]}].
init_per_suite(Config) ->
Config.
@@ -871,6 +872,48 @@ scan_token_1({more, Cont}, [C | Cs], Fun, Loc, Rs) ->
%% End of ex2
+line_wrap(doc) -> "Much more examples.";
+line_wrap(suite) -> [];
+line_wrap(Config) when is_list(Config) ->
+ Xrl =
+ <<"
+Definitions.
+Rules.
+[a]+[\\n]*= : {token, {first, TokenLine}}.
+[a]+ : {token, {second, TokenLine}}.
+[\\s\\r\\n\\t]+ : skip_token.
+Erlang code.
+ ">>,
+ Dir = ?privdir,
+ XrlFile = filename:join(Dir, "test_line_wrap.xrl"),
+ ?line ok = file:write_file(XrlFile, Xrl),
+ ErlFile = filename:join(Dir, "test_line_wrap.erl"),
+ {ok, _} = leex:file(XrlFile, []),
+ {ok, _} = compile:file(ErlFile, [{outdir,Dir}]),
+ code:purge(test_line_wrap),
+ AbsFile = filename:rootname(ErlFile, ".erl"),
+ code:load_abs(AbsFile, test_line_wrap),
+ fun() ->
+ S = "aaa\naaa",
+ {ok,[{second,1},{second,2}],2} = test_line_wrap:string(S)
+ end(),
+ fun() ->
+ S = "aaa\naaa",
+ {ok,[{second,3},{second,4}],4} = test_line_wrap:string(S, 3)
+ end(),
+ fun() ->
+ {done,{ok,{second,1},1},"\na"} = test_line_wrap:token([], "a\na"),
+ {more,Cont1} = test_line_wrap:token([], "\na"),
+ {done,{ok,{second,2},2},eof} = test_line_wrap:token(Cont1, eof)
+ end(),
+ fun() ->
+ {more,Cont1} = test_line_wrap:tokens([], "a\na"),
+ {done,{ok,[{second,1},{second,2}],2},eof} = test_line_wrap:tokens(Cont1, eof)
+ end(),
+ ok.
+
+%% End of line_wrap
+
not_yet(doc) ->
"Not yet implemented.";
not_yet(suite) -> [];