author     Loïc Hoguin <[email protected]>  2013-03-05 17:49:58 +0100
committer  Loïc Hoguin <[email protected]>  2013-03-05 21:54:35 +0100
commit     233cf43ab9c6c16d22e14039a79606fc935693d6 (patch)
tree       22ab1570534f9ec2dc00995d797eafe97377dbf7 /src/cowboy_http.erl
parent     55e98f4f61b8a7da470bed5e1473c1a186cf8c1f (diff)
Make streamed chunk size configurable
Defaults to a maximum of 1000000 bytes. Also standardize the te_identity and te_chunked decoding functions. Now they both try to read as much as possible (up to the limit), making body reading much faster when not using chunked encoding.
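For context, a minimal caller-side sketch of the te_identity/2 contract described above, assuming a hypothetical read loop over a raw gen_tcp socket. This is illustration only, not Cowboy's actual cowboy_req code, and it assumes te_identity/2 is reachable from outside cowboy_http; the diff below shows the spec change it relies on.

%% Hypothetical read loop; State is the {Streamed, Total} transfer state
%% from the spec below, where Total is the body's Content-Length.
read_identity_body(Socket, State, Acc) ->
    {ok, Data} = gen_tcp:recv(Socket, 0),
    case cowboy_http:te_identity(Data, State) of
        {more, _BytesRemaining, Chunk, State2} ->
            %% Body not complete yet; accumulate and keep reading.
            read_identity_body(Socket, State2, [Acc, Chunk]);
        {done, Chunk, _TotalBytes, Rest} ->
            %% Body complete; Rest belongs to the next request on the socket.
            {ok, iolist_to_binary([Acc, Chunk]), Rest}
    end.

Such a loop would be started as read_identity_body(Socket, {0, ContentLength}, []).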
Diffstat (limited to 'src/cowboy_http.erl')
-rw-r--r--  src/cowboy_http.erl  8
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/src/cowboy_http.erl b/src/cowboy_http.erl
index 1d19838..57dac0b 100644
--- a/src/cowboy_http.erl
+++ b/src/cowboy_http.erl
@@ -853,7 +853,7 @@ authorization_basic_password(<<C, Rest/binary>>, Fun, Acc) ->
%% @doc Decode a stream of chunks.
-spec te_chunked(Bin, TransferState)
-> more | {more, non_neg_integer(), Bin, TransferState}
- | {ok, Bin, TransferState} | {ok, Bin, Bin, TransferState}
+ | {ok, Bin, Bin, TransferState}
| {done, non_neg_integer(), Bin} | {error, badarg}
when Bin::binary(), TransferState::{non_neg_integer(), non_neg_integer()}.
te_chunked(<< "0\r\n\r\n", Rest/binary >>, {0, Streamed}) ->
@@ -879,11 +879,13 @@ te_chunked(Data, {ChunkRem, Streamed}) ->
%% @doc Decode an identity stream.
-spec te_identity(Bin, TransferState)
- -> {ok, Bin, TransferState} | {done, Bin, non_neg_integer(), Bin}
+ -> {more, non_neg_integer(), Bin, TransferState}
+ | {done, Bin, non_neg_integer(), Bin}
when Bin::binary(), TransferState::{non_neg_integer(), non_neg_integer()}.
te_identity(Data, {Streamed, Total})
when Streamed + byte_size(Data) < Total ->
- {ok, Data, {Streamed + byte_size(Data), Total}};
+ Streamed2 = Streamed + byte_size(Data),
+ {more, Total - Streamed2, Data, {Streamed2, Total}};
te_identity(Data, {Streamed, Total}) ->
Size = Total - Streamed,
<< Data2:Size/binary, Rest/binary >> = Data,
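Concretely (values invented for illustration, and assuming the done clause, which continues past the hunk shown here, returns {done, Data2, Total, Rest} as its spec indicates): with a body of Total = 10 bytes,

te_identity(<<"abcd">>, {0, 10})      %% -> {more, 6, <<"abcd">>, {4, 10}}
te_identity(<<"efghijXY">>, {4, 10})  %% -> {done, <<"efghij">>, 10, <<"XY">>}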