-rw-r--r--  .github/workflows/ci.yaml     70
-rw-r--r--  Makefile                      11
-rw-r--r--  ebin/cowlib.app                3
-rw-r--r--  erlang.mk                      4
-rw-r--r--  src/cow_hpack.erl            360
-rw-r--r--  src/cow_hpack_common.hrl     376
-rw-r--r--  src/cow_http.erl             681
-rw-r--r--  src/cow_http1.erl            421
-rw-r--r--  src/cow_http2.erl             10
-rw-r--r--  src/cow_http2_machine.erl    396
-rw-r--r--  src/cow_http3.erl            458
-rw-r--r--  src/cow_http3_machine.erl    721
-rw-r--r--  src/cow_qpack.erl           1581
13 files changed, 4066 insertions, 1026 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..57a2cb1
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,70 @@
+## Use workflows from ninenines/ci.erlang.mk to test Cowlib.
+
+name: Check Cowlib
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ schedule:
+ ## Every Monday at 2am.
+ - cron: 0 2 * * 1
+
+env:
+ CI_ERLANG_MK: 1
+
+jobs:
+ cleanup-master:
+ name: Cleanup master build
+ runs-on: ubuntu-latest
+ steps:
+
+ - name: Cleanup master build if necessary
+ if: ${{ github.event_name == 'schedule' }}
+ run: |
+ gh extension install actions/gh-actions-cache
+ gh actions-cache delete Linux-X64-Erlang-master -R $REPO --confirm || true
+ gh actions-cache delete macOS-X64-Erlang-master -R $REPO --confirm || true
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ REPO: ${{ github.repository }}
+
+ check:
+ name: Cowlib
+ needs: cleanup-master
+ uses: ninenines/ci.erlang.mk/.github/workflows/ci.yaml@master
+
+# The perfs tests are nice to run but typically not
+# important. So we run them after we are done with the other
+# test suites. At this point we know that Erlang was built
+# so we can just use the latest version.
+
+ perfs:
+ name: Run performance tests
+ needs: check
+ runs-on: 'ubuntu-latest'
+ if: ${{ !cancelled() }}
+ steps:
+
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Output latest Erlang/OTP version
+ id: latest_version
+ run: |
+ {
+ echo "latest<<EOF"
+ make ci-list | grep -v rc | grep -v master | tail -n1
+ echo EOF
+ } >> "$GITHUB_OUTPUT"
+
+ - name: Restore CI cache
+ uses: actions/cache/restore@v4
+ with:
+ path: |
+ ~/erlang/
+ key: ${{ runner.os }}-${{ runner.arch }}-Erlang-${{ steps.latest_version.outputs.latest }}
+
+ - name: Run perfs
+ run: make perfs LATEST_ERLANG_OTP=1
diff --git a/Makefile b/Makefile
index 93debfc..b63c221 100644
--- a/Makefile
+++ b/Makefile
@@ -7,11 +7,6 @@ PROJECT_VERSION = 2.13.0
# Options.
#ERLC_OPTS += +bin_opt_info
-ifdef HIPE
- ERLC_OPTS += -smp +native
- TEST_ERLC_OPTS += -smp +native
-endif
-
DIALYZER_OPTS = -Werror_handling -Wunmatched_returns
# Dependencies.
@@ -34,10 +29,8 @@ dep_uritemplate-tests = git https://github.com/uri-templates/uritemplate-test ma
dep_ci.erlang.mk = git https://github.com/ninenines/ci.erlang.mk master
DEP_EARLY_PLUGINS = ci.erlang.mk
-AUTO_CI_OTP ?= OTP-21+
-AUTO_CI_HIPE ?= OTP-LATEST
-# AUTO_CI_ERLLVM ?= OTP-LATEST
-AUTO_CI_WINDOWS ?= OTP-21+
+AUTO_CI_OTP ?= OTP-LATEST-24+
+AUTO_CI_WINDOWS ?= OTP-LATEST-24+
# Hex configuration.
diff --git a/ebin/cowlib.app b/ebin/cowlib.app
index ade4ae5..47e4ced 100644
--- a/ebin/cowlib.app
+++ b/ebin/cowlib.app
@@ -1,8 +1,9 @@
{application, 'cowlib', [
{description, "Support library for manipulating Web protocols."},
{vsn, "2.13.0"},
- {modules, ['cow_base64url','cow_cookie','cow_date','cow_hpack','cow_http','cow_http2','cow_http2_machine','cow_http_hd','cow_http_struct_hd','cow_http_te','cow_iolists','cow_link','cow_mimetypes','cow_multipart','cow_qs','cow_spdy','cow_sse','cow_uri','cow_uri_template','cow_ws']},
+ {modules, ['cow_base64url','cow_cookie','cow_date','cow_hpack','cow_http','cow_http1','cow_http2','cow_http2_machine','cow_http3','cow_http3_machine','cow_http_hd','cow_http_struct_hd','cow_http_te','cow_iolists','cow_link','cow_mimetypes','cow_multipart','cow_qpack','cow_qs','cow_spdy','cow_sse','cow_uri','cow_uri_template','cow_ws']},
{registered, []},
{applications, [kernel,stdlib,crypto]},
+ {optional_applications, []},
{env, []}
]}.
\ No newline at end of file
diff --git a/erlang.mk b/erlang.mk
index 518a1d2..0c1b3ac 100644
--- a/erlang.mk
+++ b/erlang.mk
@@ -17,7 +17,7 @@
ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
export ERLANG_MK_FILENAME
-ERLANG_MK_VERSION = 61f58ff
+ERLANG_MK_VERSION = 2022.05.31-67-g61f58ff-dirty
ERLANG_MK_WITHOUT =
# Make 3.81 and 3.82 are deprecated.
@@ -3565,7 +3565,7 @@ REBAR_DEPS_DIR = $(DEPS_DIR)
export REBAR_DEPS_DIR
REBAR3_GIT ?= https://github.com/erlang/rebar3
-REBAR3_COMMIT ?= 3f563feaf1091a1980241adefa83a32dd2eebf7c # 3.20.0
+REBAR3_COMMIT ?= 06aaecd51b0ce828b66bb65a74d3c1fd7833a4ba # 3.22.1 + OTP-27 fixes
CACHE_DEPS ?= 0
diff --git a/src/cow_hpack.erl b/src/cow_hpack.erl
index d7ae475..cddeb06 100644
--- a/src/cow_hpack.erl
+++ b/src/cow_hpack.erl
@@ -40,13 +40,17 @@
-opaque state() :: #state{}.
-export_type([state/0]).
--type opts() :: map().
--export_type([opts/0]).
+-type encoder_opts() :: #{
+ huffman => boolean()
+}.
+-export_type([encoder_opts/0]).
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
-endif.
+-include("cow_hpack_common.hrl").
+
%% State initialization.
-spec init() -> state().
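As a usage sketch of the narrowed encoder_opts() type above (the header names and values are arbitrary placeholders; per the encode/2 body shown further down, Huffman coding is the default, while encode/3 lets the caller turn it off):

State0 = cow_hpack:init(),
{HeaderBlock, State1} = cow_hpack:encode(
    [{<<":method">>, <<"GET">>}, {<<"accept-encoding">>, <<"gzip">>}],
    State0, #{huffman => false}),
%% HeaderBlock is an iodata() header block ready for a HEADERS frame;
%% State1 carries the updated dynamic table for the next block.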
@@ -182,22 +186,6 @@ dec_lit_no_index(Rest, State, Acc, Name) ->
%% @todo Literal header field never indexed.
-%% Decode an integer.
-
-%% The HPACK format has 4 different integer prefix lengths (from 4 to 7)
-%% and each can be used to create an indefinite length integer if all bits
-%% of the prefix are set to 1.
-
-dec_int5(<< 2#11111:5, Rest/bits >>) ->
- dec_big_int(Rest, 31, 0);
-dec_int5(<< Int:5, Rest/bits >>) ->
- {Int, Rest}.
-
-dec_big_int(<< 0:1, Value:7, Rest/bits >>, Int, M) ->
- {Int + (Value bsl M), Rest};
-dec_big_int(<< 1:1, Value:7, Rest/bits >>, Int, M) ->
- dec_big_int(Rest, Int + (Value bsl M), M + 7).
-
%% Decode a string.
dec_str(<<0:1, 2#1111111:7, Rest0/bits>>) ->
@@ -213,41 +201,6 @@ dec_str(<<1:1, 2#1111111:7, Rest0/bits>>) ->
dec_str(<<1:1, Length:7, Rest/bits>>) ->
dec_huffman(Rest, Length, 0, <<>>).
-%% We use a lookup table that allows us to benefit from
-%% the binary match context optimization. A more naive
-%% implementation using bit pattern matching cannot reuse
-%% a match context because it wouldn't always match on
-%% byte boundaries.
-%%
-%% See cow_hpack_dec_huffman_lookup.hrl for more details.
-
-dec_huffman(<<A:4, B:4, R/bits>>, Len, Huff0, Acc) when Len > 1 ->
- {_, CharA, Huff1} = dec_huffman_lookup(Huff0, A),
- {_, CharB, Huff} = dec_huffman_lookup(Huff1, B),
- case {CharA, CharB} of
- {undefined, undefined} -> dec_huffman(R, Len - 1, Huff, Acc);
- {CharA, undefined} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharA>>);
- {undefined, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharB>>);
- {CharA, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharA, CharB>>)
- end;
-dec_huffman(<<A:4, B:4, Rest/bits>>, 1, Huff0, Acc) ->
- {_, CharA, Huff} = dec_huffman_lookup(Huff0, A),
- {ok, CharB, _} = dec_huffman_lookup(Huff, B),
- case {CharA, CharB} of
- %% {undefined, undefined} (> 7-bit final padding) is rejected with a crash.
- {CharA, undefined} ->
- {<<Acc/binary, CharA>>, Rest};
- {undefined, CharB} ->
- {<<Acc/binary, CharB>>, Rest};
- _ ->
- {<<Acc/binary, CharA, CharB>>, Rest}
- end;
-%% Can only be reached when the string length to decode is 0.
-dec_huffman(Rest, 0, _, <<>>) ->
- {<<>>, Rest}.
-
--include("cow_hpack_dec_huffman_lookup.hrl").
-
-ifdef(TEST).
%% Test case extracted from h2spec.
decode_reject_eos_test() ->
@@ -530,7 +483,8 @@ encode(Headers, State0=#state{configured_max_size=MaxSize}) ->
{Data, State} = encode(Headers, State1, huffman, []),
{[enc_int5(MaxSize, 2#001)|Data], State}.
--spec encode(cow_http:headers(), State, opts()) -> {iodata(), State} when State::state().
+-spec encode(cow_http:headers(), State, encoder_opts())
+ -> {iodata(), State} when State::state().
encode(Headers, State=#state{max_size=MaxSize, configured_max_size=MaxSize}, Opts) ->
encode(Headers, State, huffman_opt(Opts), []);
encode(Headers, State0=#state{configured_max_size=MaxSize}, Opts) ->
@@ -569,304 +523,6 @@ encode([{Name, Value0}|Tail], State, HuffmanOpt, Acc) ->
[[<< 0:1, 1:1, 0:6 >>|[enc_str(Name, HuffmanOpt)|enc_str(Value, HuffmanOpt)]]|Acc])
end.
-%% Encode an integer.
-
-enc_int5(Int, Prefix) when Int < 31 ->
- << Prefix:3, Int:5 >>;
-enc_int5(Int, Prefix) ->
- enc_big_int(Int - 31, << Prefix:3, 2#11111:5 >>).
-
-enc_int6(Int, Prefix) when Int < 63 ->
- << Prefix:2, Int:6 >>;
-enc_int6(Int, Prefix) ->
- enc_big_int(Int - 63, << Prefix:2, 2#111111:6 >>).
-
-enc_int7(Int, Prefix) when Int < 127 ->
- << Prefix:1, Int:7 >>;
-enc_int7(Int, Prefix) ->
- enc_big_int(Int - 127, << Prefix:1, 2#1111111:7 >>).
-
-enc_big_int(Int, Acc) when Int < 128 ->
- <<Acc/binary, Int:8>>;
-enc_big_int(Int, Acc) ->
- enc_big_int(Int bsr 7, <<Acc/binary, 1:1, Int:7>>).
-
-%% Encode a string.
-
-enc_str(Str, huffman) ->
- Str2 = enc_huffman(Str, <<>>),
- [enc_int7(byte_size(Str2), 2#1)|Str2];
-enc_str(Str, no_huffman) ->
- [enc_int7(byte_size(Str), 2#0)|Str].
-
-enc_huffman(<<>>, Acc) ->
- case bit_size(Acc) rem 8 of
- 1 -> << Acc/bits, 2#1111111:7 >>;
- 2 -> << Acc/bits, 2#111111:6 >>;
- 3 -> << Acc/bits, 2#11111:5 >>;
- 4 -> << Acc/bits, 2#1111:4 >>;
- 5 -> << Acc/bits, 2#111:3 >>;
- 6 -> << Acc/bits, 2#11:2 >>;
- 7 -> << Acc/bits, 2#1:1 >>;
- 0 -> Acc
- end;
-enc_huffman(<< 0, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111000:13 >>);
-enc_huffman(<< 1, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011000:23 >>);
-enc_huffman(<< 2, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100010:28 >>);
-enc_huffman(<< 3, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100011:28 >>);
-enc_huffman(<< 4, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100100:28 >>);
-enc_huffman(<< 5, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100101:28 >>);
-enc_huffman(<< 6, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100110:28 >>);
-enc_huffman(<< 7, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100111:28 >>);
-enc_huffman(<< 8, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101000:28 >>);
-enc_huffman(<< 9, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101010:24 >>);
-enc_huffman(<< 10, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111111111100:30 >>);
-enc_huffman(<< 11, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101001:28 >>);
-enc_huffman(<< 12, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101010:28 >>);
-enc_huffman(<< 13, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111111111101:30 >>);
-enc_huffman(<< 14, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101011:28 >>);
-enc_huffman(<< 15, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101100:28 >>);
-enc_huffman(<< 16, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101101:28 >>);
-enc_huffman(<< 17, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101110:28 >>);
-enc_huffman(<< 18, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101111:28 >>);
-enc_huffman(<< 19, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110000:28 >>);
-enc_huffman(<< 20, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110001:28 >>);
-enc_huffman(<< 21, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110010:28 >>);
-enc_huffman(<< 22, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111111111110:30 >>);
-enc_huffman(<< 23, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110011:28 >>);
-enc_huffman(<< 24, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110100:28 >>);
-enc_huffman(<< 25, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110101:28 >>);
-enc_huffman(<< 26, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110110:28 >>);
-enc_huffman(<< 27, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110111:28 >>);
-enc_huffman(<< 28, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111000:28 >>);
-enc_huffman(<< 29, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111001:28 >>);
-enc_huffman(<< 30, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111010:28 >>);
-enc_huffman(<< 31, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111011:28 >>);
-enc_huffman(<< 32, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010100:6 >>);
-enc_huffman(<< 33, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111000:10 >>);
-enc_huffman(<< 34, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111001:10 >>);
-enc_huffman(<< 35, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111010:12 >>);
-enc_huffman(<< 36, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111001:13 >>);
-enc_huffman(<< 37, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010101:6 >>);
-enc_huffman(<< 38, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111000:8 >>);
-enc_huffman(<< 39, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111010:11 >>);
-enc_huffman(<< 40, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111010:10 >>);
-enc_huffman(<< 41, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111011:10 >>);
-enc_huffman(<< 42, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111001:8 >>);
-enc_huffman(<< 43, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111011:11 >>);
-enc_huffman(<< 44, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111010:8 >>);
-enc_huffman(<< 45, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010110:6 >>);
-enc_huffman(<< 46, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010111:6 >>);
-enc_huffman(<< 47, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011000:6 >>);
-enc_huffman(<< 48, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00000:5 >>);
-enc_huffman(<< 49, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00001:5 >>);
-enc_huffman(<< 50, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00010:5 >>);
-enc_huffman(<< 51, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011001:6 >>);
-enc_huffman(<< 52, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011010:6 >>);
-enc_huffman(<< 53, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011011:6 >>);
-enc_huffman(<< 54, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011100:6 >>);
-enc_huffman(<< 55, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011101:6 >>);
-enc_huffman(<< 56, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011110:6 >>);
-enc_huffman(<< 57, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011111:6 >>);
-enc_huffman(<< 58, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011100:7 >>);
-enc_huffman(<< 59, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111011:8 >>);
-enc_huffman(<< 60, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111100:15 >>);
-enc_huffman(<< 61, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100000:6 >>);
-enc_huffman(<< 62, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111011:12 >>);
-enc_huffman(<< 63, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111100:10 >>);
-enc_huffman(<< 64, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111010:13 >>);
-enc_huffman(<< 65, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100001:6 >>);
-enc_huffman(<< 66, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011101:7 >>);
-enc_huffman(<< 67, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011110:7 >>);
-enc_huffman(<< 68, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011111:7 >>);
-enc_huffman(<< 69, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100000:7 >>);
-enc_huffman(<< 70, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100001:7 >>);
-enc_huffman(<< 71, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100010:7 >>);
-enc_huffman(<< 72, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100011:7 >>);
-enc_huffman(<< 73, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100100:7 >>);
-enc_huffman(<< 74, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100101:7 >>);
-enc_huffman(<< 75, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100110:7 >>);
-enc_huffman(<< 76, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100111:7 >>);
-enc_huffman(<< 77, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101000:7 >>);
-enc_huffman(<< 78, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101001:7 >>);
-enc_huffman(<< 79, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101010:7 >>);
-enc_huffman(<< 80, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101011:7 >>);
-enc_huffman(<< 81, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101100:7 >>);
-enc_huffman(<< 82, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101101:7 >>);
-enc_huffman(<< 83, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101110:7 >>);
-enc_huffman(<< 84, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101111:7 >>);
-enc_huffman(<< 85, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110000:7 >>);
-enc_huffman(<< 86, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110001:7 >>);
-enc_huffman(<< 87, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110010:7 >>);
-enc_huffman(<< 88, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111100:8 >>);
-enc_huffman(<< 89, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110011:7 >>);
-enc_huffman(<< 90, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111101:8 >>);
-enc_huffman(<< 91, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111011:13 >>);
-enc_huffman(<< 92, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111110000:19 >>);
-enc_huffman(<< 93, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111100:13 >>);
-enc_huffman(<< 94, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111100:14 >>);
-enc_huffman(<< 95, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100010:6 >>);
-enc_huffman(<< 96, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111101:15 >>);
-enc_huffman(<< 97, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00011:5 >>);
-enc_huffman(<< 98, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100011:6 >>);
-enc_huffman(<< 99, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00100:5 >>);
-enc_huffman(<< 100, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100100:6 >>);
-enc_huffman(<< 101, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00101:5 >>);
-enc_huffman(<< 102, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100101:6 >>);
-enc_huffman(<< 103, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100110:6 >>);
-enc_huffman(<< 104, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100111:6 >>);
-enc_huffman(<< 105, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00110:5 >>);
-enc_huffman(<< 106, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110100:7 >>);
-enc_huffman(<< 107, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110101:7 >>);
-enc_huffman(<< 108, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101000:6 >>);
-enc_huffman(<< 109, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101001:6 >>);
-enc_huffman(<< 110, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101010:6 >>);
-enc_huffman(<< 111, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00111:5 >>);
-enc_huffman(<< 112, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101011:6 >>);
-enc_huffman(<< 113, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110110:7 >>);
-enc_huffman(<< 114, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101100:6 >>);
-enc_huffman(<< 115, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#01000:5 >>);
-enc_huffman(<< 116, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#01001:5 >>);
-enc_huffman(<< 117, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101101:6 >>);
-enc_huffman(<< 118, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110111:7 >>);
-enc_huffman(<< 119, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111000:7 >>);
-enc_huffman(<< 120, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111001:7 >>);
-enc_huffman(<< 121, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111010:7 >>);
-enc_huffman(<< 122, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111011:7 >>);
-enc_huffman(<< 123, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111110:15 >>);
-enc_huffman(<< 124, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111100:11 >>);
-enc_huffman(<< 125, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111101:14 >>);
-enc_huffman(<< 126, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111101:13 >>);
-enc_huffman(<< 127, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111100:28 >>);
-enc_huffman(<< 128, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111100110:20 >>);
-enc_huffman(<< 129, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010010:22 >>);
-enc_huffman(<< 130, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111100111:20 >>);
-enc_huffman(<< 131, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101000:20 >>);
-enc_huffman(<< 132, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010011:22 >>);
-enc_huffman(<< 133, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010100:22 >>);
-enc_huffman(<< 134, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010101:22 >>);
-enc_huffman(<< 135, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011001:23 >>);
-enc_huffman(<< 136, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010110:22 >>);
-enc_huffman(<< 137, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011010:23 >>);
-enc_huffman(<< 138, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011011:23 >>);
-enc_huffman(<< 139, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011100:23 >>);
-enc_huffman(<< 140, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011101:23 >>);
-enc_huffman(<< 141, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011110:23 >>);
-enc_huffman(<< 142, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101011:24 >>);
-enc_huffman(<< 143, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011111:23 >>);
-enc_huffman(<< 144, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101100:24 >>);
-enc_huffman(<< 145, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101101:24 >>);
-enc_huffman(<< 146, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010111:22 >>);
-enc_huffman(<< 147, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100000:23 >>);
-enc_huffman(<< 148, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101110:24 >>);
-enc_huffman(<< 149, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100001:23 >>);
-enc_huffman(<< 150, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100010:23 >>);
-enc_huffman(<< 151, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100011:23 >>);
-enc_huffman(<< 152, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100100:23 >>);
-enc_huffman(<< 153, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011100:21 >>);
-enc_huffman(<< 154, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011000:22 >>);
-enc_huffman(<< 155, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100101:23 >>);
-enc_huffman(<< 156, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011001:22 >>);
-enc_huffman(<< 157, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100110:23 >>);
-enc_huffman(<< 158, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100111:23 >>);
-enc_huffman(<< 159, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101111:24 >>);
-enc_huffman(<< 160, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011010:22 >>);
-enc_huffman(<< 161, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011101:21 >>);
-enc_huffman(<< 162, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101001:20 >>);
-enc_huffman(<< 163, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011011:22 >>);
-enc_huffman(<< 164, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011100:22 >>);
-enc_huffman(<< 165, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101000:23 >>);
-enc_huffman(<< 166, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101001:23 >>);
-enc_huffman(<< 167, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011110:21 >>);
-enc_huffman(<< 168, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101010:23 >>);
-enc_huffman(<< 169, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011101:22 >>);
-enc_huffman(<< 170, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011110:22 >>);
-enc_huffman(<< 171, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110000:24 >>);
-enc_huffman(<< 172, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011111:21 >>);
-enc_huffman(<< 173, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011111:22 >>);
-enc_huffman(<< 174, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101011:23 >>);
-enc_huffman(<< 175, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101100:23 >>);
-enc_huffman(<< 176, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100000:21 >>);
-enc_huffman(<< 177, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100001:21 >>);
-enc_huffman(<< 178, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100000:22 >>);
-enc_huffman(<< 179, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100010:21 >>);
-enc_huffman(<< 180, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101101:23 >>);
-enc_huffman(<< 181, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100001:22 >>);
-enc_huffman(<< 182, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101110:23 >>);
-enc_huffman(<< 183, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101111:23 >>);
-enc_huffman(<< 184, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101010:20 >>);
-enc_huffman(<< 185, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100010:22 >>);
-enc_huffman(<< 186, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100011:22 >>);
-enc_huffman(<< 187, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100100:22 >>);
-enc_huffman(<< 188, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110000:23 >>);
-enc_huffman(<< 189, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100101:22 >>);
-enc_huffman(<< 190, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100110:22 >>);
-enc_huffman(<< 191, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110001:23 >>);
-enc_huffman(<< 192, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100000:26 >>);
-enc_huffman(<< 193, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100001:26 >>);
-enc_huffman(<< 194, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101011:20 >>);
-enc_huffman(<< 195, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111110001:19 >>);
-enc_huffman(<< 196, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100111:22 >>);
-enc_huffman(<< 197, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110010:23 >>);
-enc_huffman(<< 198, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101000:22 >>);
-enc_huffman(<< 199, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101100:25 >>);
-enc_huffman(<< 200, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100010:26 >>);
-enc_huffman(<< 201, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100011:26 >>);
-enc_huffman(<< 202, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100100:26 >>);
-enc_huffman(<< 203, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111011110:27 >>);
-enc_huffman(<< 204, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111011111:27 >>);
-enc_huffman(<< 205, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100101:26 >>);
-enc_huffman(<< 206, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110001:24 >>);
-enc_huffman(<< 207, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101101:25 >>);
-enc_huffman(<< 208, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111110010:19 >>);
-enc_huffman(<< 209, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100011:21 >>);
-enc_huffman(<< 210, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100110:26 >>);
-enc_huffman(<< 211, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100000:27 >>);
-enc_huffman(<< 212, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100001:27 >>);
-enc_huffman(<< 213, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100111:26 >>);
-enc_huffman(<< 214, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100010:27 >>);
-enc_huffman(<< 215, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110010:24 >>);
-enc_huffman(<< 216, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100100:21 >>);
-enc_huffman(<< 217, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100101:21 >>);
-enc_huffman(<< 218, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101000:26 >>);
-enc_huffman(<< 219, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101001:26 >>);
-enc_huffman(<< 220, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111101:28 >>);
-enc_huffman(<< 221, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100011:27 >>);
-enc_huffman(<< 222, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100100:27 >>);
-enc_huffman(<< 223, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100101:27 >>);
-enc_huffman(<< 224, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101100:20 >>);
-enc_huffman(<< 225, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110011:24 >>);
-enc_huffman(<< 226, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101101:20 >>);
-enc_huffman(<< 227, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100110:21 >>);
-enc_huffman(<< 228, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101001:22 >>);
-enc_huffman(<< 229, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100111:21 >>);
-enc_huffman(<< 230, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111101000:21 >>);
-enc_huffman(<< 231, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110011:23 >>);
-enc_huffman(<< 232, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101010:22 >>);
-enc_huffman(<< 233, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101011:22 >>);
-enc_huffman(<< 234, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101110:25 >>);
-enc_huffman(<< 235, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101111:25 >>);
-enc_huffman(<< 236, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110100:24 >>);
-enc_huffman(<< 237, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110101:24 >>);
-enc_huffman(<< 238, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101010:26 >>);
-enc_huffman(<< 239, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110100:23 >>);
-enc_huffman(<< 240, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101011:26 >>);
-enc_huffman(<< 241, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100110:27 >>);
-enc_huffman(<< 242, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101100:26 >>);
-enc_huffman(<< 243, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101101:26 >>);
-enc_huffman(<< 244, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100111:27 >>);
-enc_huffman(<< 245, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101000:27 >>);
-enc_huffman(<< 246, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101001:27 >>);
-enc_huffman(<< 247, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101010:27 >>);
-enc_huffman(<< 248, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101011:27 >>);
-enc_huffman(<< 249, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111110:28 >>);
-enc_huffman(<< 250, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101100:27 >>);
-enc_huffman(<< 251, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101101:27 >>);
-enc_huffman(<< 252, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101110:27 >>);
-enc_huffman(<< 253, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101111:27 >>);
-enc_huffman(<< 254, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111110000:27 >>);
-enc_huffman(<< 255, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101110:26 >>).
-
-ifdef(TEST).
req_encode_test() ->
%% First request (raw then huffman).
diff --git a/src/cow_hpack_common.hrl b/src/cow_hpack_common.hrl
new file mode 100644
index 0000000..92f9514
--- /dev/null
+++ b/src/cow_hpack_common.hrl
@@ -0,0 +1,376 @@
+%% Copyright (c) 2015-2020, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% The prefixed integer and the string formats are common
+%% to both HPACK and QPACK. They are included directly in
+%% each module in order to avoid fully-qualified calls and
+%% slightly improve performance.
+%%
+%% Some functions are only used in one or the other even
+%% though the format is the same. In that case the functions
+%% can be found in the relevant module.
+%%
+%% Any tests relevant to these functions should be added to
+%% cow_hpack since HPACK is where these originate from.
+
+%% Prefix decoding.
+%%
+%% The HPACK format has 4 different integer prefix lengths (from 4 to 7)
+%% and each can be used to create an indefinite length integer if all bits
+%% of the prefix are set to 1.
+
+dec_int5(<<2#11111:5, Rest/bits>>) ->
+ dec_big_int(Rest, 31, 0);
+dec_int5(<<Int:5, Rest/bits>>) ->
+ {Int, Rest}.
+
+dec_big_int(<<0:1, Value:7, Rest/bits>>, Int, M) ->
+ {Int + (Value bsl M), Rest};
+dec_big_int(<<1:1, Value:7, Rest/bits>>, Int, M) ->
+ dec_big_int(Rest, Int + (Value bsl M), M + 7).
+
+%% Prefix encoding.
+
+enc_int5(Int, Prefix) when Int < 31 ->
+ <<Prefix:3, Int:5>>;
+enc_int5(Int, Prefix) ->
+ enc_big_int(Int - 31, <<Prefix:3, 2#11111:5>>).
+
+enc_int6(Int, Prefix) when Int < 63 ->
+ <<Prefix:2, Int:6>>;
+enc_int6(Int, Prefix) ->
+ enc_big_int(Int - 63, <<Prefix:2, 2#111111:6>>).
+
+enc_int7(Int, Prefix) when Int < 127 ->
+ <<Prefix:1, Int:7>>;
+enc_int7(Int, Prefix) ->
+ enc_big_int(Int - 127, <<Prefix:1, 2#1111111:7>>).
+
+enc_big_int(Int, Acc) when Int < 128 ->
+ <<Acc/binary, Int:8>>;
+enc_big_int(Int, Acc) ->
+ enc_big_int(Int bsr 7, <<Acc/binary, 1:1, Int:7>>).
+
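A worked example of the prefix encoding just above, using the familiar RFC 7541 value 1337 with a 5-bit prefix (these helpers are internal to the include file, so this is only a sketch of how the clauses reduce):

%% enc_int5(1337, 2#001)
%%   1337 >= 31  -> enc_big_int(1337 - 31, <<2#001:3, 2#11111:5>>)
%%   1306 >= 128 -> append <<1:1, 26:7>> (26 is the low 7 bits of 1306),
%%                  then recurse on 1306 bsr 7 = 10
%%   10 < 128    -> append <<10:8>> and stop
%% giving <<2#00111111, 2#10011010, 2#00001010>>. Conversely, once the 3-bit
%% instruction prefix has been matched off, dec_int5(<<2#11111:5, 154, 10>>)
%% walks the same continuation bytes and returns {1337, <<>>}.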
+%% String decoding.
+%%
+%% We use a lookup table that allows us to benefit from
+%% the binary match context optimization. A more naive
+%% implementation using bit pattern matching cannot reuse
+%% a match context because it wouldn't always match on
+%% byte boundaries.
+%%
+%% See cow_hpack_dec_huffman_lookup.hrl for more details.
+
+dec_huffman(<<A:4, B:4, R/bits>>, Len, Huff0, Acc) when Len > 1 ->
+ {_, CharA, Huff1} = dec_huffman_lookup(Huff0, A),
+ {_, CharB, Huff} = dec_huffman_lookup(Huff1, B),
+ case {CharA, CharB} of
+ {undefined, undefined} -> dec_huffman(R, Len - 1, Huff, Acc);
+ {CharA, undefined} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharA>>);
+ {undefined, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharB>>);
+ {CharA, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharA, CharB>>)
+ end;
+dec_huffman(<<A:4, B:4, Rest/bits>>, 1, Huff0, Acc) ->
+ {_, CharA, Huff} = dec_huffman_lookup(Huff0, A),
+ {ok, CharB, _} = dec_huffman_lookup(Huff, B),
+ case {CharA, CharB} of
+ %% {undefined, undefined} (> 7-bit final padding) is rejected with a crash.
+ {CharA, undefined} ->
+ {<<Acc/binary, CharA>>, Rest};
+ {undefined, CharB} ->
+ {<<Acc/binary, CharB>>, Rest};
+ _ ->
+ {<<Acc/binary, CharA, CharB>>, Rest}
+ end;
+%% Can only be reached when the string length to decode is 0.
+dec_huffman(Rest, 0, _, <<>>) ->
+ {<<>>, Rest}.
+
+-include("cow_hpack_dec_huffman_lookup.hrl").
+
+%% String encoding.
+
+enc_str(Str, huffman) ->
+ Str2 = enc_huffman(Str, <<>>),
+ [enc_int7(byte_size(Str2), 2#1)|Str2];
+enc_str(Str, no_huffman) ->
+ [enc_int7(byte_size(Str), 2#0)|Str].
+
+enc_huffman(<<>>, Acc) ->
+ case bit_size(Acc) rem 8 of
+ 1 -> <<Acc/bits, 2#1111111:7>>;
+ 2 -> <<Acc/bits, 2#111111:6>>;
+ 3 -> <<Acc/bits, 2#11111:5>>;
+ 4 -> <<Acc/bits, 2#1111:4>>;
+ 5 -> <<Acc/bits, 2#111:3>>;
+ 6 -> <<Acc/bits, 2#11:2>>;
+ 7 -> <<Acc/bits, 2#1:1>>;
+ 0 -> Acc
+ end;
+enc_huffman(<<0, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111000:13>>);
+enc_huffman(<<1, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011000:23>>);
+enc_huffman(<<2, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100010:28>>);
+enc_huffman(<<3, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100011:28>>);
+enc_huffman(<<4, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100100:28>>);
+enc_huffman(<<5, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100101:28>>);
+enc_huffman(<<6, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100110:28>>);
+enc_huffman(<<7, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100111:28>>);
+enc_huffman(<<8, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101000:28>>);
+enc_huffman(<<9, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101010:24>>);
+enc_huffman(<<10, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111111111100:30>>);
+enc_huffman(<<11, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101001:28>>);
+enc_huffman(<<12, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101010:28>>);
+enc_huffman(<<13, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111111111101:30>>);
+enc_huffman(<<14, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101011:28>>);
+enc_huffman(<<15, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101100:28>>);
+enc_huffman(<<16, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101101:28>>);
+enc_huffman(<<17, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101110:28>>);
+enc_huffman(<<18, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101111:28>>);
+enc_huffman(<<19, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110000:28>>);
+enc_huffman(<<20, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110001:28>>);
+enc_huffman(<<21, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110010:28>>);
+enc_huffman(<<22, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111111111110:30>>);
+enc_huffman(<<23, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110011:28>>);
+enc_huffman(<<24, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110100:28>>);
+enc_huffman(<<25, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110101:28>>);
+enc_huffman(<<26, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110110:28>>);
+enc_huffman(<<27, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110111:28>>);
+enc_huffman(<<28, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111000:28>>);
+enc_huffman(<<29, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111001:28>>);
+enc_huffman(<<30, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111010:28>>);
+enc_huffman(<<31, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111011:28>>);
+enc_huffman(<<32, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#010100:6>>);
+enc_huffman(<<33, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111000:10>>);
+enc_huffman(<<34, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111001:10>>);
+enc_huffman(<<35, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111010:12>>);
+enc_huffman(<<36, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111001:13>>);
+enc_huffman(<<37, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#010101:6>>);
+enc_huffman(<<38, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111000:8>>);
+enc_huffman(<<39, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111010:11>>);
+enc_huffman(<<40, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111010:10>>);
+enc_huffman(<<41, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111011:10>>);
+enc_huffman(<<42, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111001:8>>);
+enc_huffman(<<43, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111011:11>>);
+enc_huffman(<<44, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111010:8>>);
+enc_huffman(<<45, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#010110:6>>);
+enc_huffman(<<46, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#010111:6>>);
+enc_huffman(<<47, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011000:6>>);
+enc_huffman(<<48, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00000:5>>);
+enc_huffman(<<49, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00001:5>>);
+enc_huffman(<<50, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00010:5>>);
+enc_huffman(<<51, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011001:6>>);
+enc_huffman(<<52, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011010:6>>);
+enc_huffman(<<53, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011011:6>>);
+enc_huffman(<<54, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011100:6>>);
+enc_huffman(<<55, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011101:6>>);
+enc_huffman(<<56, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011110:6>>);
+enc_huffman(<<57, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011111:6>>);
+enc_huffman(<<58, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1011100:7>>);
+enc_huffman(<<59, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111011:8>>);
+enc_huffman(<<60, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111100:15>>);
+enc_huffman(<<61, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100000:6>>);
+enc_huffman(<<62, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111011:12>>);
+enc_huffman(<<63, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111100:10>>);
+enc_huffman(<<64, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111010:13>>);
+enc_huffman(<<65, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100001:6>>);
+enc_huffman(<<66, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1011101:7>>);
+enc_huffman(<<67, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1011110:7>>);
+enc_huffman(<<68, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1011111:7>>);
+enc_huffman(<<69, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100000:7>>);
+enc_huffman(<<70, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100001:7>>);
+enc_huffman(<<71, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100010:7>>);
+enc_huffman(<<72, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100011:7>>);
+enc_huffman(<<73, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100100:7>>);
+enc_huffman(<<74, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100101:7>>);
+enc_huffman(<<75, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100110:7>>);
+enc_huffman(<<76, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100111:7>>);
+enc_huffman(<<77, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101000:7>>);
+enc_huffman(<<78, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101001:7>>);
+enc_huffman(<<79, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101010:7>>);
+enc_huffman(<<80, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101011:7>>);
+enc_huffman(<<81, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101100:7>>);
+enc_huffman(<<82, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101101:7>>);
+enc_huffman(<<83, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101110:7>>);
+enc_huffman(<<84, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101111:7>>);
+enc_huffman(<<85, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110000:7>>);
+enc_huffman(<<86, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110001:7>>);
+enc_huffman(<<87, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110010:7>>);
+enc_huffman(<<88, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111100:8>>);
+enc_huffman(<<89, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110011:7>>);
+enc_huffman(<<90, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111101:8>>);
+enc_huffman(<<91, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111011:13>>);
+enc_huffman(<<92, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111110000:19>>);
+enc_huffman(<<93, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111100:13>>);
+enc_huffman(<<94, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111100:14>>);
+enc_huffman(<<95, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100010:6>>);
+enc_huffman(<<96, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111101:15>>);
+enc_huffman(<<97, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00011:5>>);
+enc_huffman(<<98, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100011:6>>);
+enc_huffman(<<99, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00100:5>>);
+enc_huffman(<<100, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100100:6>>);
+enc_huffman(<<101, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00101:5>>);
+enc_huffman(<<102, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100101:6>>);
+enc_huffman(<<103, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100110:6>>);
+enc_huffman(<<104, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100111:6>>);
+enc_huffman(<<105, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00110:5>>);
+enc_huffman(<<106, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110100:7>>);
+enc_huffman(<<107, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110101:7>>);
+enc_huffman(<<108, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101000:6>>);
+enc_huffman(<<109, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101001:6>>);
+enc_huffman(<<110, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101010:6>>);
+enc_huffman(<<111, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00111:5>>);
+enc_huffman(<<112, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101011:6>>);
+enc_huffman(<<113, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110110:7>>);
+enc_huffman(<<114, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101100:6>>);
+enc_huffman(<<115, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#01000:5>>);
+enc_huffman(<<116, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#01001:5>>);
+enc_huffman(<<117, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101101:6>>);
+enc_huffman(<<118, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110111:7>>);
+enc_huffman(<<119, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111000:7>>);
+enc_huffman(<<120, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111001:7>>);
+enc_huffman(<<121, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111010:7>>);
+enc_huffman(<<122, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111011:7>>);
+enc_huffman(<<123, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111110:15>>);
+enc_huffman(<<124, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111100:11>>);
+enc_huffman(<<125, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111101:14>>);
+enc_huffman(<<126, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111101:13>>);
+enc_huffman(<<127, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111100:28>>);
+enc_huffman(<<128, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111100110:20>>);
+enc_huffman(<<129, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010010:22>>);
+enc_huffman(<<130, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111100111:20>>);
+enc_huffman(<<131, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101000:20>>);
+enc_huffman(<<132, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010011:22>>);
+enc_huffman(<<133, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010100:22>>);
+enc_huffman(<<134, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010101:22>>);
+enc_huffman(<<135, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011001:23>>);
+enc_huffman(<<136, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010110:22>>);
+enc_huffman(<<137, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011010:23>>);
+enc_huffman(<<138, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011011:23>>);
+enc_huffman(<<139, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011100:23>>);
+enc_huffman(<<140, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011101:23>>);
+enc_huffman(<<141, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011110:23>>);
+enc_huffman(<<142, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101011:24>>);
+enc_huffman(<<143, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011111:23>>);
+enc_huffman(<<144, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101100:24>>);
+enc_huffman(<<145, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101101:24>>);
+enc_huffman(<<146, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010111:22>>);
+enc_huffman(<<147, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100000:23>>);
+enc_huffman(<<148, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101110:24>>);
+enc_huffman(<<149, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100001:23>>);
+enc_huffman(<<150, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100010:23>>);
+enc_huffman(<<151, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100011:23>>);
+enc_huffman(<<152, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100100:23>>);
+enc_huffman(<<153, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111011100:21>>);
+enc_huffman(<<154, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011000:22>>);
+enc_huffman(<<155, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100101:23>>);
+enc_huffman(<<156, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011001:22>>);
+enc_huffman(<<157, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100110:23>>);
+enc_huffman(<<158, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100111:23>>);
+enc_huffman(<<159, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101111:24>>);
+enc_huffman(<<160, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011010:22>>);
+enc_huffman(<<161, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111011101:21>>);
+enc_huffman(<<162, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101001:20>>);
+enc_huffman(<<163, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011011:22>>);
+enc_huffman(<<164, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011100:22>>);
+enc_huffman(<<165, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101000:23>>);
+enc_huffman(<<166, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101001:23>>);
+enc_huffman(<<167, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111011110:21>>);
+enc_huffman(<<168, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101010:23>>);
+enc_huffman(<<169, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011101:22>>);
+enc_huffman(<<170, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011110:22>>);
+enc_huffman(<<171, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110000:24>>);
+enc_huffman(<<172, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111011111:21>>);
+enc_huffman(<<173, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011111:22>>);
+enc_huffman(<<174, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101011:23>>);
+enc_huffman(<<175, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101100:23>>);
+enc_huffman(<<176, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100000:21>>);
+enc_huffman(<<177, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100001:21>>);
+enc_huffman(<<178, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100000:22>>);
+enc_huffman(<<179, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100010:21>>);
+enc_huffman(<<180, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101101:23>>);
+enc_huffman(<<181, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100001:22>>);
+enc_huffman(<<182, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101110:23>>);
+enc_huffman(<<183, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101111:23>>);
+enc_huffman(<<184, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101010:20>>);
+enc_huffman(<<185, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100010:22>>);
+enc_huffman(<<186, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100011:22>>);
+enc_huffman(<<187, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100100:22>>);
+enc_huffman(<<188, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110000:23>>);
+enc_huffman(<<189, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100101:22>>);
+enc_huffman(<<190, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100110:22>>);
+enc_huffman(<<191, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110001:23>>);
+enc_huffman(<<192, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100000:26>>);
+enc_huffman(<<193, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100001:26>>);
+enc_huffman(<<194, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101011:20>>);
+enc_huffman(<<195, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111110001:19>>);
+enc_huffman(<<196, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100111:22>>);
+enc_huffman(<<197, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110010:23>>);
+enc_huffman(<<198, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111101000:22>>);
+enc_huffman(<<199, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111101100:25>>);
+enc_huffman(<<200, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100010:26>>);
+enc_huffman(<<201, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100011:26>>);
+enc_huffman(<<202, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100100:26>>);
+enc_huffman(<<203, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111011110:27>>);
+enc_huffman(<<204, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111011111:27>>);
+enc_huffman(<<205, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100101:26>>);
+enc_huffman(<<206, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110001:24>>);
+enc_huffman(<<207, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111101101:25>>);
+enc_huffman(<<208, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111110010:19>>);
+enc_huffman(<<209, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100011:21>>);
+enc_huffman(<<210, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100110:26>>);
+enc_huffman(<<211, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100000:27>>);
+enc_huffman(<<212, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100001:27>>);
+enc_huffman(<<213, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100111:26>>);
+enc_huffman(<<214, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100010:27>>);
+enc_huffman(<<215, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110010:24>>);
+enc_huffman(<<216, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100100:21>>);
+enc_huffman(<<217, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100101:21>>);
+enc_huffman(<<218, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101000:26>>);
+enc_huffman(<<219, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101001:26>>);
+enc_huffman(<<220, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111101:28>>);
+enc_huffman(<<221, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100011:27>>);
+enc_huffman(<<222, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100100:27>>);
+enc_huffman(<<223, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100101:27>>);
+enc_huffman(<<224, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101100:20>>);
+enc_huffman(<<225, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110011:24>>);
+enc_huffman(<<226, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101101:20>>);
+enc_huffman(<<227, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100110:21>>);
+enc_huffman(<<228, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111101001:22>>);
+enc_huffman(<<229, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100111:21>>);
+enc_huffman(<<230, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111101000:21>>);
+enc_huffman(<<231, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110011:23>>);
+enc_huffman(<<232, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111101010:22>>);
+enc_huffman(<<233, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111101011:22>>);
+enc_huffman(<<234, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111101110:25>>);
+enc_huffman(<<235, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111101111:25>>);
+enc_huffman(<<236, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110100:24>>);
+enc_huffman(<<237, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110101:24>>);
+enc_huffman(<<238, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101010:26>>);
+enc_huffman(<<239, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110100:23>>);
+enc_huffman(<<240, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101011:26>>);
+enc_huffman(<<241, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100110:27>>);
+enc_huffman(<<242, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101100:26>>);
+enc_huffman(<<243, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101101:26>>);
+enc_huffman(<<244, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100111:27>>);
+enc_huffman(<<245, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101000:27>>);
+enc_huffman(<<246, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101001:27>>);
+enc_huffman(<<247, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101010:27>>);
+enc_huffman(<<248, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101011:27>>);
+enc_huffman(<<249, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111110:28>>);
+enc_huffman(<<250, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101100:27>>);
+enc_huffman(<<251, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101101:27>>);
+enc_huffman(<<252, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101110:27>>);
+enc_huffman(<<253, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101111:27>>);
+enc_huffman(<<254, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111110000:27>>);
+enc_huffman(<<255, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101110:26>>).
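The clauses above map each input byte to its static Huffman code from RFC 7541 Appendix B, appending the variable-length bit pattern to a bitstring accumulator. Not part of the patch: a minimal standalone sketch of the same accumulate-bits technique, using a hypothetical two-symbol table, to make the pattern explicit.

%% Toy illustration only (hypothetical function, not in cowlib): each byte
%% is matched off the front of the binary and its variable-length code is
%% appended to the bitstring accumulator, exactly as enc_huffman/2 does.
toy_huffman(<<$a, R/bits>>, A) -> toy_huffman(R, <<A/bits, 2#0:1>>);
toy_huffman(<<$b, R/bits>>, A) -> toy_huffman(R, <<A/bits, 2#10:2>>);
toy_huffman(<<>>, A) -> A.
%% toy_huffman(<<"aab">>, <<>>) returns the 4-bit bitstring 2#0010, which a
%% real encoder then pads to a byte boundary with EOS prefix bits.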
diff --git a/src/cow_http.erl b/src/cow_http.erl
index 93e9193..b4bc672 100644
--- a/src/cow_http.erl
+++ b/src/cow_http.erl
@@ -12,23 +12,35 @@
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+%% This module contains functions and types common
+%% to all or most HTTP versions.
-module(cow_http).
+%% The HTTP/1 functions have been moved to cow_http1.
+%% In order to remain backward compatible, we redirect
+%% calls to cow_http1. The type version() was moved
+%% and no fallback is provided.
+%%
+%% @todo Remove the aliases in Cowlib 3.0.
-export([parse_request_line/1]).
-export([parse_status_line/1]).
-export([status_to_integer/1]).
-export([parse_headers/1]).
-
-export([parse_fullpath/1]).
-export([parse_version/1]).
-
-export([request/4]).
-export([response/3]).
-export([headers/1]).
-export([version/1]).
--type version() :: 'HTTP/1.0' | 'HTTP/1.1'.
--export_type([version/0]).
+%% Functions used by HTTP/2+.
+
+-export([format_semantic_error/1]).
+-export([merge_pseudo_headers/2]).
+-export([process_headers/5]).
+-export([remove_http1_headers/1]).
+
+%% Types used by all versions of HTTP.
-type status() :: 100..999.
-export_type([status/0]).
@@ -36,391 +48,324 @@
-type headers() :: [{binary(), iodata()}].
-export_type([headers/0]).
--include("cow_inline.hrl").
-
-%% @doc Parse the request line.
-
--spec parse_request_line(binary()) -> {binary(), binary(), version(), binary()}.
-parse_request_line(Data) ->
- {Pos, _} = binary:match(Data, <<"\r">>),
- <<RequestLine:Pos/binary, "\r\n", Rest/bits>> = Data,
- [Method, Target, Version0] = binary:split(RequestLine, <<$\s>>, [trim_all, global]),
- Version = case Version0 of
- <<"HTTP/1.1">> -> 'HTTP/1.1';
- <<"HTTP/1.0">> -> 'HTTP/1.0'
- end,
- {Method, Target, Version, Rest}.
-
--ifdef(TEST).
-parse_request_line_test_() ->
- Tests = [
- {<<"GET /path HTTP/1.0\r\nRest">>,
- {<<"GET">>, <<"/path">>, 'HTTP/1.0', <<"Rest">>}},
- {<<"GET /path HTTP/1.1\r\nRest">>,
- {<<"GET">>, <<"/path">>, 'HTTP/1.1', <<"Rest">>}},
- {<<"CONNECT proxy.example.org:1080 HTTP/1.1\r\nRest">>,
- {<<"CONNECT">>, <<"proxy.example.org:1080">>, 'HTTP/1.1', <<"Rest">>}}
- ],
- [{V, fun() -> R = parse_request_line(V) end}
- || {V, R} <- Tests].
-
-parse_request_line_error_test_() ->
- Tests = [
- <<>>,
- <<"GET">>,
- <<"GET /path\r\n">>,
- <<"GET /path HTTP/1.1">>,
- <<"GET /path HTTP/1.1\r">>,
- <<"GET /path HTTP/1.1\n">>,
- <<"GET /path HTTP/0.9\r\n">>,
- <<"content-type: text/plain\r\n">>,
- <<0:80, "\r\n">>
- ],
- [{V, fun() -> {'EXIT', _} = (catch parse_request_line(V)) end}
- || V <- Tests].
-
-horse_parse_request_line_get_path() ->
- horse:repeat(200000,
- parse_request_line(<<"GET /path HTTP/1.1\r\n">>)
- ).
--endif.
-
-%% @doc Parse the status line.
-
--spec parse_status_line(binary()) -> {version(), status(), binary(), binary()}.
-parse_status_line(<< "HTTP/1.1 200 OK\r\n", Rest/bits >>) ->
- {'HTTP/1.1', 200, <<"OK">>, Rest};
-parse_status_line(<< "HTTP/1.1 404 Not Found\r\n", Rest/bits >>) ->
- {'HTTP/1.1', 404, <<"Not Found">>, Rest};
-parse_status_line(<< "HTTP/1.1 500 Internal Server Error\r\n", Rest/bits >>) ->
- {'HTTP/1.1', 500, <<"Internal Server Error">>, Rest};
-parse_status_line(<< "HTTP/1.1 ", Status/bits >>) ->
- parse_status_line(Status, 'HTTP/1.1');
-parse_status_line(<< "HTTP/1.0 ", Status/bits >>) ->
- parse_status_line(Status, 'HTTP/1.0').
-
-parse_status_line(<<H, T, U, " ", Rest/bits>>, Version) ->
- Status = status_to_integer(H, T, U),
- {Pos, _} = binary:match(Rest, <<"\r">>),
- << StatusStr:Pos/binary, "\r\n", Rest2/bits >> = Rest,
- {Version, Status, StatusStr, Rest2}.
+%% Types used by HTTP/2+.
+
+-type pseudo_headers() :: #{} %% Trailers
+ | #{ %% Responses.
+ status := cow_http:status()
+ } | #{ %% Normal CONNECT requests.
+ method := binary(),
+ authority := binary()
+ } | #{ %% Extended CONNECT requests.
+ method := binary(),
+ scheme := binary(),
+ authority := binary(),
+ path := binary(),
+ protocol := binary()
+ } | #{ %% Other requests.
+ method := binary(),
+ scheme := binary(),
+ authority => binary(),
+ path := binary()
+ }.
+-export_type([pseudo_headers/0]).
+
+-type fin() :: fin | nofin.
+-export_type([fin/0]).
+
+%% HTTP/1 function aliases.
+
+-spec parse_request_line(binary()) -> {binary(), binary(), cow_http1:version(), binary()}.
+parse_request_line(Data) -> cow_http1:parse_request_line(Data).
+
+-spec parse_status_line(binary()) -> {cow_http1:version(), status(), binary(), binary()}.
+parse_status_line(Data) -> cow_http1:parse_status_line(Data).
-spec status_to_integer(status() | binary()) -> status().
-status_to_integer(Status) when is_integer(Status) ->
- Status;
-status_to_integer(Status) ->
- case Status of
- <<H, T, U>> ->
- status_to_integer(H, T, U);
- <<H, T, U, " ", _/bits>> ->
- status_to_integer(H, T, U)
- end.
-
-status_to_integer(H, T, U)
- when $0 =< H, H =< $9, $0 =< T, T =< $9, $0 =< U, U =< $9 ->
- (H - $0) * 100 + (T - $0) * 10 + (U - $0).
-
--ifdef(TEST).
-parse_status_line_test_() ->
- Tests = [
- {<<"HTTP/1.1 200 OK\r\nRest">>,
- {'HTTP/1.1', 200, <<"OK">>, <<"Rest">>}},
- {<<"HTTP/1.0 404 Not Found\r\nRest">>,
- {'HTTP/1.0', 404, <<"Not Found">>, <<"Rest">>}},
- {<<"HTTP/1.1 500 Something very funny here\r\nRest">>,
- {'HTTP/1.1', 500, <<"Something very funny here">>, <<"Rest">>}},
- {<<"HTTP/1.1 200 \r\nRest">>,
- {'HTTP/1.1', 200, <<>>, <<"Rest">>}}
- ],
- [{V, fun() -> R = parse_status_line(V) end}
- || {V, R} <- Tests].
-
-parse_status_line_error_test_() ->
- Tests = [
- <<>>,
- <<"HTTP/1.1">>,
- <<"HTTP/1.1 200\r\n">>,
- <<"HTTP/1.1 200 OK">>,
- <<"HTTP/1.1 200 OK\r">>,
- <<"HTTP/1.1 200 OK\n">>,
- <<"HTTP/0.9 200 OK\r\n">>,
- <<"HTTP/1.1 42 Answer\r\n">>,
- <<"HTTP/1.1 999999999 More than OK\r\n">>,
- <<"content-type: text/plain\r\n">>,
- <<0:80, "\r\n">>
- ],
- [{V, fun() -> {'EXIT', _} = (catch parse_status_line(V)) end}
- || V <- Tests].
-
-horse_parse_status_line_200() ->
- horse:repeat(200000,
- parse_status_line(<<"HTTP/1.1 200 OK\r\n">>)
- ).
+status_to_integer(Status) -> cow_http1:status_to_integer(Status).
-horse_parse_status_line_404() ->
- horse:repeat(200000,
- parse_status_line(<<"HTTP/1.1 404 Not Found\r\n">>)
- ).
+-spec parse_headers(binary()) -> {[{binary(), binary()}], binary()}.
+parse_headers(Data) -> cow_http1:parse_headers(Data).
-horse_parse_status_line_500() ->
- horse:repeat(200000,
- parse_status_line(<<"HTTP/1.1 500 Internal Server Error\r\n">>)
- ).
+-spec parse_fullpath(binary()) -> {binary(), binary()}.
+parse_fullpath(Fullpath) -> cow_http1:parse_fullpath(Fullpath).
-horse_parse_status_line_other() ->
- horse:repeat(200000,
- parse_status_line(<<"HTTP/1.1 416 Requested range not satisfiable\r\n">>)
- ).
--endif.
+-spec parse_version(binary()) -> cow_http1:version().
+parse_version(Data) -> cow_http1:parse_version(Data).
-%% @doc Parse the list of headers.
+-spec request(binary(), iodata(), cow_http1:version(), headers()) -> iodata().
+request(Method, Path, Version, Headers) -> cow_http1:request(Method, Path, Version, Headers).
--spec parse_headers(binary()) -> {[{binary(), binary()}], binary()}.
-parse_headers(Data) ->
- parse_header(Data, []).
-
-parse_header(<< $\r, $\n, Rest/bits >>, Acc) ->
- {lists:reverse(Acc), Rest};
-parse_header(Data, Acc) ->
- parse_hd_name(Data, Acc, <<>>).
-
-parse_hd_name(<< C, Rest/bits >>, Acc, SoFar) ->
- case C of
- $: -> parse_hd_before_value(Rest, Acc, SoFar);
- $\s -> parse_hd_name_ws(Rest, Acc, SoFar);
- $\t -> parse_hd_name_ws(Rest, Acc, SoFar);
- _ -> ?LOWER(parse_hd_name, Rest, Acc, SoFar)
- end.
+-spec response(status() | binary(), cow_http1:version(), headers()) -> iodata().
+response(Status, Version, Headers) -> cow_http1:response(Status, Version, Headers).
-parse_hd_name_ws(<< C, Rest/bits >>, Acc, Name) ->
- case C of
- $: -> parse_hd_before_value(Rest, Acc, Name);
- $\s -> parse_hd_name_ws(Rest, Acc, Name);
- $\t -> parse_hd_name_ws(Rest, Acc, Name)
+-spec headers(headers()) -> iodata().
+headers(Headers) -> cow_http1:headers(Headers).
+
+-spec version(cow_http1:version()) -> binary().
+version(Version) -> cow_http1:version(Version).
+
+%% Functions used by HTTP/2+.
+
+%% Semantic errors are common to all HTTP versions.
+
+-spec format_semantic_error(atom()) -> atom().
+
+format_semantic_error(connect_invalid_content_length_2xx) ->
+ 'Content-length header received in a 2xx response to a CONNECT request. (RFC7230 3.3.2).';
+format_semantic_error(invalid_content_length_header) ->
+ 'The content-length header is invalid. (RFC7230 3.3.2)';
+format_semantic_error(invalid_content_length_header_1xx) ->
+ 'Content-length header received in a 1xx response. (RFC7230 3.3.2)';
+format_semantic_error(invalid_content_length_header_204) ->
+ 'Content-length header received in a 204 response. (RFC7230 3.3.2)';
+format_semantic_error(multiple_content_length_headers) ->
+ 'Multiple content-length headers were received. (RFC7230 3.3.2)'.
+
+%% Merge the pseudo-headers at the start of the headers list.
+
+-spec merge_pseudo_headers(pseudo_headers(), headers()) -> headers().
+
+merge_pseudo_headers(PseudoHeaders, Headers0) ->
+ lists:foldl(fun
+ ({status, Status}, Acc) when is_integer(Status) ->
+ [{<<":status">>, integer_to_binary(Status)}|Acc];
+ ({Name, Value}, Acc) ->
+ [{iolist_to_binary([$:, atom_to_binary(Name, latin1)]), Value}|Acc]
+ end, Headers0, maps:to_list(PseudoHeaders)).
+
+%% Process HTTP/2+ headers. This is done after decoding them.
+
+-spec process_headers(headers(), request | push_promise | response | trailers,
+ binary() | undefined, fin(), #{enable_connect_protocol => boolean(), any() => any()})
+ -> {headers, headers(), pseudo_headers(), non_neg_integer() | undefined}
+ | {push_promise, headers(), pseudo_headers()}
+ | {trailers, headers()}
+ | {error, atom()}.
+
+process_headers(Headers0, Type, ReqMethod, IsFin, LocalSettings)
+ when Type =:= request; Type =:= push_promise ->
+ IsExtendedConnectEnabled = maps:get(enable_connect_protocol, LocalSettings, false),
+ case request_pseudo_headers(Headers0, #{}) of
+ %% Extended CONNECT method (HTTP/2: RFC8441, HTTP/3: RFC9220).
+ {ok, PseudoHeaders=#{method := <<"CONNECT">>, scheme := _,
+ authority := _, path := _, protocol := _}, Headers}
+ when IsExtendedConnectEnabled ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders);
+ {ok, #{method := <<"CONNECT">>, scheme := _,
+ authority := _, path := _}, _}
+ when IsExtendedConnectEnabled ->
+ {error, extended_connect_missing_protocol};
+ {ok, #{protocol := _}, _} ->
+ {error, invalid_protocol_pseudo_header};
+ %% Normal CONNECT (no scheme/path).
+ {ok, PseudoHeaders = #{method := <<"CONNECT">>, authority := _}, Headers}
+ when map_size(PseudoHeaders) =:= 2 ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders);
+ {ok, #{method := <<"CONNECT">>, authority := _}, _} ->
+ {error, connect_invalid_pseudo_header};
+ {ok, #{method := <<"CONNECT">>}, _} ->
+ {error, connect_missing_authority};
+ %% Other requests.
+ {ok, PseudoHeaders = #{method := _, scheme := _, path := _}, Headers} ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders);
+ {ok, _, _} ->
+ {error, missing_pseudo_header};
+ Error = {error, _} ->
+ Error
+ end;
+process_headers(Headers0, Type = response, ReqMethod, IsFin, _LocalSettings) ->
+ case response_pseudo_headers(Headers0, #{}) of
+ {ok, PseudoHeaders=#{status := _}, Headers} ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders);
+ {ok, _, _} ->
+ {error, missing_pseudo_header};
+ Error = {error, _} ->
+ Error
+ end;
+process_headers(Headers, Type = trailers, ReqMethod, IsFin, _LocalSettings) ->
+ case trailers_have_pseudo_headers(Headers) of
+ false ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, #{});
+ true ->
+ {error, trailer_invalid_pseudo_header}
end.
-parse_hd_before_value(<< $\s, Rest/bits >>, Acc, Name) ->
- parse_hd_before_value(Rest, Acc, Name);
-parse_hd_before_value(<< $\t, Rest/bits >>, Acc, Name) ->
- parse_hd_before_value(Rest, Acc, Name);
-parse_hd_before_value(Data, Acc, Name) ->
- parse_hd_value(Data, Acc, Name, <<>>).
-
-parse_hd_value(<< $\r, Rest/bits >>, Acc, Name, SoFar) ->
- case Rest of
- << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t ->
- parse_hd_value(Rest2, Acc, Name, << SoFar/binary, C >>);
- << $\n, Rest2/bits >> ->
- Value = clean_value_ws_end(SoFar, byte_size(SoFar) - 1),
- parse_header(Rest2, [{Name, Value}|Acc])
+request_pseudo_headers([{<<":method">>, _}|_], #{method := _}) ->
+ {error, multiple_method_pseudo_headers};
+request_pseudo_headers([{<<":method">>, Method}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{method => Method});
+request_pseudo_headers([{<<":scheme">>, _}|_], #{scheme := _}) ->
+ {error, multiple_scheme_pseudo_headers};
+request_pseudo_headers([{<<":scheme">>, Scheme}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{scheme => Scheme});
+request_pseudo_headers([{<<":authority">>, _}|_], #{authority := _}) ->
+ {error, multiple_authority_pseudo_headers};
+request_pseudo_headers([{<<":authority">>, Authority}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{authority => Authority});
+request_pseudo_headers([{<<":path">>, _}|_], #{path := _}) ->
+ {error, multiple_path_pseudo_headers};
+request_pseudo_headers([{<<":path">>, Path}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{path => Path});
+request_pseudo_headers([{<<":protocol">>, _}|_], #{protocol := _}) ->
+ {error, multiple_protocol_pseudo_headers};
+request_pseudo_headers([{<<":protocol">>, Protocol}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{protocol => Protocol});
+request_pseudo_headers([{<<":", _/bits>>, _}|_], _) ->
+ {error, invalid_pseudo_header};
+request_pseudo_headers(Headers, PseudoHeaders) ->
+ {ok, PseudoHeaders, Headers}.
+
+response_pseudo_headers([{<<":status">>, _}|_], #{status := _}) ->
+ {error, multiple_status_pseudo_headers};
+response_pseudo_headers([{<<":status">>, Status}|Tail], PseudoHeaders) ->
+ try cow_http:status_to_integer(Status) of
+ IntStatus ->
+ response_pseudo_headers(Tail, PseudoHeaders#{status => IntStatus})
+ catch _:_ ->
+ {error, invalid_status_pseudo_header}
end;
-parse_hd_value(<< C, Rest/bits >>, Acc, Name, SoFar) ->
- parse_hd_value(Rest, Acc, Name, << SoFar/binary, C >>).
-
-%% This function has been copied from cowboy_http.
-clean_value_ws_end(_, -1) ->
- <<>>;
-clean_value_ws_end(Value, N) ->
- case binary:at(Value, N) of
- $\s -> clean_value_ws_end(Value, N - 1);
- $\t -> clean_value_ws_end(Value, N - 1);
- _ ->
- S = N + 1,
- << Value2:S/binary, _/bits >> = Value,
- Value2
+response_pseudo_headers([{<<":", _/bits>>, _}|_], _) ->
+ {error, invalid_pseudo_header};
+response_pseudo_headers(Headers, PseudoHeaders) ->
+ {ok, PseudoHeaders, Headers}.
+
+trailers_have_pseudo_headers([]) ->
+ false;
+trailers_have_pseudo_headers([{<<":", _/bits>>, _}|_]) ->
+ true;
+trailers_have_pseudo_headers([_|Tail]) ->
+ trailers_have_pseudo_headers(Tail).
+
+%% Rejecting invalid regular headers might be a bit too strong for clients.
+regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders) ->
+ case regular_headers(Headers, Type) of
+ ok when Type =:= request ->
+ request_expected_size(Headers, IsFin, PseudoHeaders);
+ ok when Type =:= push_promise ->
+ return_push_promise(Headers, PseudoHeaders);
+ ok when Type =:= response ->
+ response_expected_size(Headers, ReqMethod, IsFin, PseudoHeaders);
+ ok when Type =:= trailers ->
+ return_trailers(Headers);
+ Error = {error, _} ->
+ Error
end.
--ifdef(TEST).
-parse_headers_test_() ->
- Tests = [
- {<<"\r\nRest">>,
- {[], <<"Rest">>}},
- {<<"Server: Erlang/R17 \r\n\r\n">>,
- {[{<<"server">>, <<"Erlang/R17">>}], <<>>}},
- {<<"Server: Erlang/R17\r\n"
- "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
- "Multiline-Header: why hello!\r\n"
- " I didn't see you all the way over there!\r\n"
- "Content-Length: 12\r\n"
- "Content-Type: text/plain\r\n"
- "\r\nRest">>,
- {[{<<"server">>, <<"Erlang/R17">>},
- {<<"date">>, <<"Sun, 23 Feb 2014 09:30:39 GMT">>},
- {<<"multiline-header">>,
- <<"why hello! I didn't see you all the way over there!">>},
- {<<"content-length">>, <<"12">>},
- {<<"content-type">>, <<"text/plain">>}],
- <<"Rest">>}}
- ],
- [{V, fun() -> R = parse_headers(V) end}
- || {V, R} <- Tests].
-
-parse_headers_error_test_() ->
- Tests = [
- <<>>,
- <<"\r">>,
- <<"Malformed\r\n\r\n">>,
- <<"content-type: text/plain\r\nMalformed\r\n\r\n">>,
- <<"HTTP/1.1 200 OK\r\n\r\n">>,
- <<0:80, "\r\n\r\n">>,
- <<"content-type: text/plain\r\ncontent-length: 12\r\n">>
+regular_headers([{<<>>, _}|_], _) ->
+ {error, empty_header_name};
+regular_headers([{<<":", _/bits>>, _}|_], _) ->
+ {error, pseudo_header_after_regular};
+regular_headers([{<<"connection">>, _}|_], _) ->
+ {error, invalid_connection_header};
+regular_headers([{<<"keep-alive">>, _}|_], _) ->
+ {error, invalid_keep_alive_header};
+regular_headers([{<<"proxy-authenticate">>, _}|_], _) ->
+ {error, invalid_proxy_authenticate_header};
+regular_headers([{<<"proxy-authorization">>, _}|_], _) ->
+ {error, invalid_proxy_authorization_header};
+regular_headers([{<<"transfer-encoding">>, _}|_], _) ->
+ {error, invalid_transfer_encoding_header};
+regular_headers([{<<"upgrade">>, _}|_], _) ->
+ {error, invalid_upgrade_header};
+regular_headers([{<<"te">>, Value}|_], request) when Value =/= <<"trailers">> ->
+ {error, invalid_te_value};
+regular_headers([{<<"te">>, _}|_], Type) when Type =/= request ->
+ {error, invalid_te_header};
+regular_headers([{Name, _}|Tail], Type) ->
+ Pattern = [
+ <<$A>>, <<$B>>, <<$C>>, <<$D>>, <<$E>>, <<$F>>, <<$G>>, <<$H>>, <<$I>>,
+ <<$J>>, <<$K>>, <<$L>>, <<$M>>, <<$N>>, <<$O>>, <<$P>>, <<$Q>>, <<$R>>,
+ <<$S>>, <<$T>>, <<$U>>, <<$V>>, <<$W>>, <<$X>>, <<$Y>>, <<$Z>>
],
- [{V, fun() -> {'EXIT', _} = (catch parse_headers(V)) end}
- || V <- Tests].
-
-horse_parse_headers() ->
- horse:repeat(50000,
- parse_headers(<<"Server: Erlang/R17\r\n"
- "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
- "Multiline-Header: why hello!\r\n"
- " I didn't see you all the way over there!\r\n"
- "Content-Length: 12\r\n"
- "Content-Type: text/plain\r\n"
- "\r\nRest">>)
- ).
--endif.
-
-%% @doc Extract path and query string from a binary,
-%% removing any fragment component.
-
--spec parse_fullpath(binary()) -> {binary(), binary()}.
-parse_fullpath(Fullpath) ->
- parse_fullpath(Fullpath, <<>>).
-
-parse_fullpath(<<>>, Path) -> {Path, <<>>};
-parse_fullpath(<< $#, _/bits >>, Path) -> {Path, <<>>};
-parse_fullpath(<< $?, Qs/bits >>, Path) -> parse_fullpath_query(Qs, Path, <<>>);
-parse_fullpath(<< C, Rest/bits >>, SoFar) -> parse_fullpath(Rest, << SoFar/binary, C >>).
-
-parse_fullpath_query(<<>>, Path, Query) -> {Path, Query};
-parse_fullpath_query(<< $#, _/bits >>, Path, Query) -> {Path, Query};
-parse_fullpath_query(<< C, Rest/bits >>, Path, SoFar) ->
- parse_fullpath_query(Rest, Path, << SoFar/binary, C >>).
-
--ifdef(TEST).
-parse_fullpath_test() ->
- {<<"*">>, <<>>} = parse_fullpath(<<"*">>),
- {<<"/">>, <<>>} = parse_fullpath(<<"/">>),
- {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource#fragment">>),
- {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource">>),
- {<<"/">>, <<>>} = parse_fullpath(<<"/?">>),
- {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy#fragment">>),
- {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy">>),
- {<<"/path/to/resource">>, <<"q=cowboy">>}
- = parse_fullpath(<<"/path/to/resource?q=cowboy">>),
+ case binary:match(Name, Pattern) of
+ nomatch -> regular_headers(Tail, Type);
+ _ -> {error, uppercase_header_name}
+ end;
+regular_headers([], _) ->
ok.
--endif.
-%% @doc Convert an HTTP version to atom.
-
--spec parse_version(binary()) -> version().
-parse_version(<<"HTTP/1.1">>) -> 'HTTP/1.1';
-parse_version(<<"HTTP/1.0">>) -> 'HTTP/1.0'.
+request_expected_size(Headers, IsFin, PseudoHeaders) ->
+ case [CL || {<<"content-length">>, CL} <- Headers] of
+ [] when IsFin =:= fin ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [] ->
+ return_headers(Headers, PseudoHeaders, undefined);
+ [<<"0">>] ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [_] when IsFin =:= fin ->
+ {error, non_zero_length_with_fin_flag};
+ [BinLen] ->
+ parse_expected_size(Headers, PseudoHeaders, BinLen);
+ _ ->
+ {error, multiple_content_length_headers}
+ end.
--ifdef(TEST).
-parse_version_test() ->
- 'HTTP/1.1' = parse_version(<<"HTTP/1.1">>),
- 'HTTP/1.0' = parse_version(<<"HTTP/1.0">>),
- {'EXIT', _} = (catch parse_version(<<"HTTP/1.2">>)),
- ok.
--endif.
+response_expected_size(Headers, ReqMethod, IsFin, PseudoHeaders = #{status := Status}) ->
+ case [CL || {<<"content-length">>, CL} <- Headers] of
+ [] when IsFin =:= fin ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [] ->
+ return_headers(Headers, PseudoHeaders, undefined);
+ [_] when Status >= 100, Status =< 199 ->
+ {error, invalid_content_length_header_1xx};
+ [_] when Status =:= 204 ->
+ {error, invalid_content_length_header_204};
+ [_] when Status >= 200, Status =< 299, ReqMethod =:= <<"CONNECT">> ->
+ {error, connect_invalid_content_length_2xx};
+ %% Responses to HEAD requests, and 304 responses may contain
+ %% a content-length header that must be ignored. (RFC7230 3.3.2)
+ [_] when ReqMethod =:= <<"HEAD">> ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [_] when Status =:= 304 ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [<<"0">>] when IsFin =:= fin ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [_] when IsFin =:= fin ->
+ {error, non_zero_length_with_fin_flag};
+ [BinLen] ->
+ parse_expected_size(Headers, PseudoHeaders, BinLen);
+ _ ->
+ {error, multiple_content_length_headers}
+ end.
-%% @doc Return formatted request-line and headers.
-%% @todo Add tests when the corresponding reverse functions are added.
+parse_expected_size(Headers, PseudoHeaders, BinLen) ->
+ try cow_http_hd:parse_content_length(BinLen) of
+ Len ->
+ return_headers(Headers, PseudoHeaders, Len)
+ catch _:_ ->
+ {error, invalid_content_length_header}
+ end.
--spec request(binary(), iodata(), version(), headers()) -> iodata().
-request(Method, Path, Version, Headers) ->
- [Method, <<" ">>, Path, <<" ">>, version(Version), <<"\r\n">>,
- [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers],
- <<"\r\n">>].
+return_headers(Headers, PseudoHeaders, Len) ->
+ {headers, Headers, PseudoHeaders, Len}.
--spec response(status() | binary(), version(), headers()) -> iodata().
-response(Status, Version, Headers) ->
- [version(Version), <<" ">>, status(Status), <<"\r\n">>,
- headers(Headers), <<"\r\n">>].
+return_push_promise(Headers, PseudoHeaders) ->
+ {push_promise, Headers, PseudoHeaders}.
--spec headers(headers()) -> iodata().
-headers(Headers) ->
- [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers].
+return_trailers(Headers) ->
+ {trailers, Headers}.
-%% @doc Return the version as a binary.
+%% Remove HTTP/1-specific headers.
--spec version(version()) -> binary().
-version('HTTP/1.1') -> <<"HTTP/1.1">>;
-version('HTTP/1.0') -> <<"HTTP/1.0">>.
+-spec remove_http1_headers(headers()) -> headers().
--ifdef(TEST).
-version_test() ->
- <<"HTTP/1.1">> = version('HTTP/1.1'),
- <<"HTTP/1.0">> = version('HTTP/1.0'),
- {'EXIT', _} = (catch version('HTTP/1.2')),
- ok.
--endif.
-
-%% @doc Return the status code and string as binary.
-
--spec status(status() | binary()) -> binary().
-status(100) -> <<"100 Continue">>;
-status(101) -> <<"101 Switching Protocols">>;
-status(102) -> <<"102 Processing">>;
-status(103) -> <<"103 Early Hints">>;
-status(200) -> <<"200 OK">>;
-status(201) -> <<"201 Created">>;
-status(202) -> <<"202 Accepted">>;
-status(203) -> <<"203 Non-Authoritative Information">>;
-status(204) -> <<"204 No Content">>;
-status(205) -> <<"205 Reset Content">>;
-status(206) -> <<"206 Partial Content">>;
-status(207) -> <<"207 Multi-Status">>;
-status(208) -> <<"208 Already Reported">>;
-status(226) -> <<"226 IM Used">>;
-status(300) -> <<"300 Multiple Choices">>;
-status(301) -> <<"301 Moved Permanently">>;
-status(302) -> <<"302 Found">>;
-status(303) -> <<"303 See Other">>;
-status(304) -> <<"304 Not Modified">>;
-status(305) -> <<"305 Use Proxy">>;
-status(306) -> <<"306 Switch Proxy">>;
-status(307) -> <<"307 Temporary Redirect">>;
-status(308) -> <<"308 Permanent Redirect">>;
-status(400) -> <<"400 Bad Request">>;
-status(401) -> <<"401 Unauthorized">>;
-status(402) -> <<"402 Payment Required">>;
-status(403) -> <<"403 Forbidden">>;
-status(404) -> <<"404 Not Found">>;
-status(405) -> <<"405 Method Not Allowed">>;
-status(406) -> <<"406 Not Acceptable">>;
-status(407) -> <<"407 Proxy Authentication Required">>;
-status(408) -> <<"408 Request Timeout">>;
-status(409) -> <<"409 Conflict">>;
-status(410) -> <<"410 Gone">>;
-status(411) -> <<"411 Length Required">>;
-status(412) -> <<"412 Precondition Failed">>;
-status(413) -> <<"413 Request Entity Too Large">>;
-status(414) -> <<"414 Request-URI Too Long">>;
-status(415) -> <<"415 Unsupported Media Type">>;
-status(416) -> <<"416 Requested Range Not Satisfiable">>;
-status(417) -> <<"417 Expectation Failed">>;
-status(418) -> <<"418 I'm a teapot">>;
-status(421) -> <<"421 Misdirected Request">>;
-status(422) -> <<"422 Unprocessable Entity">>;
-status(423) -> <<"423 Locked">>;
-status(424) -> <<"424 Failed Dependency">>;
-status(425) -> <<"425 Unordered Collection">>;
-status(426) -> <<"426 Upgrade Required">>;
-status(428) -> <<"428 Precondition Required">>;
-status(429) -> <<"429 Too Many Requests">>;
-status(431) -> <<"431 Request Header Fields Too Large">>;
-status(451) -> <<"451 Unavailable For Legal Reasons">>;
-status(500) -> <<"500 Internal Server Error">>;
-status(501) -> <<"501 Not Implemented">>;
-status(502) -> <<"502 Bad Gateway">>;
-status(503) -> <<"503 Service Unavailable">>;
-status(504) -> <<"504 Gateway Timeout">>;
-status(505) -> <<"505 HTTP Version Not Supported">>;
-status(506) -> <<"506 Variant Also Negotiates">>;
-status(507) -> <<"507 Insufficient Storage">>;
-status(508) -> <<"508 Loop Detected">>;
-status(510) -> <<"510 Not Extended">>;
-status(511) -> <<"511 Network Authentication Required">>;
-status(B) when is_binary(B) -> B.
+remove_http1_headers(Headers) ->
+ RemoveHeaders0 = [
+ <<"keep-alive">>,
+ <<"proxy-connection">>,
+ <<"transfer-encoding">>,
+ <<"upgrade">>
+ ],
+ RemoveHeaders = case lists:keyfind(<<"connection">>, 1, Headers) of
+ false ->
+ RemoveHeaders0;
+ {_, ConnHd} ->
+ %% We do not need to worry about any "close" header because
+ %% that header name is reserved.
+ Connection = cow_http_hd:parse_connection(ConnHd),
+ Connection ++ [<<"connection">>|RemoveHeaders0]
+ end,
+ lists:filter(fun({Name, _}) ->
+ not lists:member(Name, RemoveHeaders)
+ end, Headers).
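The new cow_http helpers above are the version-agnostic half of header processing: merge_pseudo_headers/2 prepends the ":"-prefixed pseudo-headers before encoding, and process_headers/5 validates and splits them again after decoding. A hedged usage sketch (all values illustrative):

PseudoHeaders = #{method => <<"GET">>, scheme => <<"https">>,
    authority => <<"example.org">>, path => <<"/">>},
Encoded = cow_http:merge_pseudo_headers(PseudoHeaders,
    [{<<"accept">>, <<"text/html">>}]),
%% Encoded now carries the ":method", ":scheme", ":authority" and ":path"
%% entries in front of the regular headers, ready for HPACK/QPACK encoding.
%% On the receiving side, after decoding:
{headers, [{<<"accept">>, <<"text/html">>}], PseudoHeaders, undefined} =
    cow_http:process_headers(Encoded, request, undefined, nofin, #{}).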
diff --git a/src/cow_http1.erl b/src/cow_http1.erl
new file mode 100644
index 0000000..24a8c88
--- /dev/null
+++ b/src/cow_http1.erl
@@ -0,0 +1,421 @@
+%% Copyright (c) 2013-2024, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http1).
+
+-export([parse_request_line/1]).
+-export([parse_status_line/1]).
+-export([status_to_integer/1]).
+-export([parse_headers/1]).
+
+-export([parse_fullpath/1]).
+-export([parse_version/1]).
+
+-export([request/4]).
+-export([response/3]).
+-export([headers/1]).
+-export([version/1]).
+
+-type version() :: 'HTTP/1.0' | 'HTTP/1.1'.
+-export_type([version/0]).
+
+-include("cow_inline.hrl").
+
+%% @doc Parse the request line.
+
+-spec parse_request_line(binary()) -> {binary(), binary(), version(), binary()}.
+parse_request_line(Data) ->
+ {Pos, _} = binary:match(Data, <<"\r">>),
+ <<RequestLine:Pos/binary, "\r\n", Rest/bits>> = Data,
+ [Method, Target, Version0] = binary:split(RequestLine, <<$\s>>, [trim_all, global]),
+ Version = case Version0 of
+ <<"HTTP/1.1">> -> 'HTTP/1.1';
+ <<"HTTP/1.0">> -> 'HTTP/1.0'
+ end,
+ {Method, Target, Version, Rest}.
+
+-ifdef(TEST).
+parse_request_line_test_() ->
+ Tests = [
+ {<<"GET /path HTTP/1.0\r\nRest">>,
+ {<<"GET">>, <<"/path">>, 'HTTP/1.0', <<"Rest">>}},
+ {<<"GET /path HTTP/1.1\r\nRest">>,
+ {<<"GET">>, <<"/path">>, 'HTTP/1.1', <<"Rest">>}},
+ {<<"CONNECT proxy.example.org:1080 HTTP/1.1\r\nRest">>,
+ {<<"CONNECT">>, <<"proxy.example.org:1080">>, 'HTTP/1.1', <<"Rest">>}}
+ ],
+ [{V, fun() -> R = parse_request_line(V) end}
+ || {V, R} <- Tests].
+
+parse_request_line_error_test_() ->
+ Tests = [
+ <<>>,
+ <<"GET">>,
+ <<"GET /path\r\n">>,
+ <<"GET /path HTTP/1.1">>,
+ <<"GET /path HTTP/1.1\r">>,
+ <<"GET /path HTTP/1.1\n">>,
+ <<"GET /path HTTP/0.9\r\n">>,
+ <<"content-type: text/plain\r\n">>,
+ <<0:80, "\r\n">>
+ ],
+ [{V, fun() -> {'EXIT', _} = (catch parse_request_line(V)) end}
+ || V <- Tests].
+
+horse_parse_request_line_get_path() ->
+ horse:repeat(200000,
+ parse_request_line(<<"GET /path HTTP/1.1\r\n">>)
+ ).
+-endif.
+
+%% @doc Parse the status line.
+
+-spec parse_status_line(binary()) -> {version(), cow_http:status(), binary(), binary()}.
+parse_status_line(<< "HTTP/1.1 200 OK\r\n", Rest/bits >>) ->
+ {'HTTP/1.1', 200, <<"OK">>, Rest};
+parse_status_line(<< "HTTP/1.1 404 Not Found\r\n", Rest/bits >>) ->
+ {'HTTP/1.1', 404, <<"Not Found">>, Rest};
+parse_status_line(<< "HTTP/1.1 500 Internal Server Error\r\n", Rest/bits >>) ->
+ {'HTTP/1.1', 500, <<"Internal Server Error">>, Rest};
+parse_status_line(<< "HTTP/1.1 ", Status/bits >>) ->
+ parse_status_line(Status, 'HTTP/1.1');
+parse_status_line(<< "HTTP/1.0 ", Status/bits >>) ->
+ parse_status_line(Status, 'HTTP/1.0').
+
+parse_status_line(<<H, T, U, " ", Rest/bits>>, Version) ->
+ Status = status_to_integer(H, T, U),
+ {Pos, _} = binary:match(Rest, <<"\r">>),
+ << StatusStr:Pos/binary, "\r\n", Rest2/bits >> = Rest,
+ {Version, Status, StatusStr, Rest2}.
+
+-spec status_to_integer(cow_http:status() | binary()) -> cow_http:status().
+status_to_integer(Status) when is_integer(Status) ->
+ Status;
+status_to_integer(Status) ->
+ case Status of
+ <<H, T, U>> ->
+ status_to_integer(H, T, U);
+ <<H, T, U, " ", _/bits>> ->
+ status_to_integer(H, T, U)
+ end.
+
+status_to_integer(H, T, U)
+ when $0 =< H, H =< $9, $0 =< T, T =< $9, $0 =< U, U =< $9 ->
+ (H - $0) * 100 + (T - $0) * 10 + (U - $0).
+
+-ifdef(TEST).
+parse_status_line_test_() ->
+ Tests = [
+ {<<"HTTP/1.1 200 OK\r\nRest">>,
+ {'HTTP/1.1', 200, <<"OK">>, <<"Rest">>}},
+ {<<"HTTP/1.0 404 Not Found\r\nRest">>,
+ {'HTTP/1.0', 404, <<"Not Found">>, <<"Rest">>}},
+ {<<"HTTP/1.1 500 Something very funny here\r\nRest">>,
+ {'HTTP/1.1', 500, <<"Something very funny here">>, <<"Rest">>}},
+ {<<"HTTP/1.1 200 \r\nRest">>,
+ {'HTTP/1.1', 200, <<>>, <<"Rest">>}}
+ ],
+ [{V, fun() -> R = parse_status_line(V) end}
+ || {V, R} <- Tests].
+
+parse_status_line_error_test_() ->
+ Tests = [
+ <<>>,
+ <<"HTTP/1.1">>,
+ <<"HTTP/1.1 200\r\n">>,
+ <<"HTTP/1.1 200 OK">>,
+ <<"HTTP/1.1 200 OK\r">>,
+ <<"HTTP/1.1 200 OK\n">>,
+ <<"HTTP/0.9 200 OK\r\n">>,
+ <<"HTTP/1.1 42 Answer\r\n">>,
+ <<"HTTP/1.1 999999999 More than OK\r\n">>,
+ <<"content-type: text/plain\r\n">>,
+ <<0:80, "\r\n">>
+ ],
+ [{V, fun() -> {'EXIT', _} = (catch parse_status_line(V)) end}
+ || V <- Tests].
+
+horse_parse_status_line_200() ->
+ horse:repeat(200000,
+ parse_status_line(<<"HTTP/1.1 200 OK\r\n">>)
+ ).
+
+horse_parse_status_line_404() ->
+ horse:repeat(200000,
+ parse_status_line(<<"HTTP/1.1 404 Not Found\r\n">>)
+ ).
+
+horse_parse_status_line_500() ->
+ horse:repeat(200000,
+ parse_status_line(<<"HTTP/1.1 500 Internal Server Error\r\n">>)
+ ).
+
+horse_parse_status_line_other() ->
+ horse:repeat(200000,
+ parse_status_line(<<"HTTP/1.1 416 Requested range not satisfiable\r\n">>)
+ ).
+-endif.
+
+%% @doc Parse the list of headers.
+
+-spec parse_headers(binary()) -> {[{binary(), binary()}], binary()}.
+parse_headers(Data) ->
+ parse_header(Data, []).
+
+parse_header(<< $\r, $\n, Rest/bits >>, Acc) ->
+ {lists:reverse(Acc), Rest};
+parse_header(Data, Acc) ->
+ parse_hd_name(Data, Acc, <<>>).
+
+parse_hd_name(<< C, Rest/bits >>, Acc, SoFar) ->
+ case C of
+ $: -> parse_hd_before_value(Rest, Acc, SoFar);
+ $\s -> parse_hd_name_ws(Rest, Acc, SoFar);
+ $\t -> parse_hd_name_ws(Rest, Acc, SoFar);
+ _ -> ?LOWER(parse_hd_name, Rest, Acc, SoFar)
+ end.
+
+parse_hd_name_ws(<< C, Rest/bits >>, Acc, Name) ->
+ case C of
+ $: -> parse_hd_before_value(Rest, Acc, Name);
+ $\s -> parse_hd_name_ws(Rest, Acc, Name);
+ $\t -> parse_hd_name_ws(Rest, Acc, Name)
+ end.
+
+parse_hd_before_value(<< $\s, Rest/bits >>, Acc, Name) ->
+ parse_hd_before_value(Rest, Acc, Name);
+parse_hd_before_value(<< $\t, Rest/bits >>, Acc, Name) ->
+ parse_hd_before_value(Rest, Acc, Name);
+parse_hd_before_value(Data, Acc, Name) ->
+ parse_hd_value(Data, Acc, Name, <<>>).
+
+parse_hd_value(<< $\r, Rest/bits >>, Acc, Name, SoFar) ->
+ case Rest of
+ << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t ->
+ parse_hd_value(Rest2, Acc, Name, << SoFar/binary, C >>);
+ << $\n, Rest2/bits >> ->
+ Value = clean_value_ws_end(SoFar, byte_size(SoFar) - 1),
+ parse_header(Rest2, [{Name, Value}|Acc])
+ end;
+parse_hd_value(<< C, Rest/bits >>, Acc, Name, SoFar) ->
+ parse_hd_value(Rest, Acc, Name, << SoFar/binary, C >>).
+
+%% This function has been copied from cowboy_http.
+clean_value_ws_end(_, -1) ->
+ <<>>;
+clean_value_ws_end(Value, N) ->
+ case binary:at(Value, N) of
+ $\s -> clean_value_ws_end(Value, N - 1);
+ $\t -> clean_value_ws_end(Value, N - 1);
+ _ ->
+ S = N + 1,
+ << Value2:S/binary, _/bits >> = Value,
+ Value2
+ end.
+
+-ifdef(TEST).
+parse_headers_test_() ->
+ Tests = [
+ {<<"\r\nRest">>,
+ {[], <<"Rest">>}},
+ {<<"Server: Erlang/R17 \r\n\r\n">>,
+ {[{<<"server">>, <<"Erlang/R17">>}], <<>>}},
+ {<<"Server: Erlang/R17\r\n"
+ "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
+ "Multiline-Header: why hello!\r\n"
+ " I didn't see you all the way over there!\r\n"
+ "Content-Length: 12\r\n"
+ "Content-Type: text/plain\r\n"
+ "\r\nRest">>,
+ {[{<<"server">>, <<"Erlang/R17">>},
+ {<<"date">>, <<"Sun, 23 Feb 2014 09:30:39 GMT">>},
+ {<<"multiline-header">>,
+ <<"why hello! I didn't see you all the way over there!">>},
+ {<<"content-length">>, <<"12">>},
+ {<<"content-type">>, <<"text/plain">>}],
+ <<"Rest">>}}
+ ],
+ [{V, fun() -> R = parse_headers(V) end}
+ || {V, R} <- Tests].
+
+parse_headers_error_test_() ->
+ Tests = [
+ <<>>,
+ <<"\r">>,
+ <<"Malformed\r\n\r\n">>,
+ <<"content-type: text/plain\r\nMalformed\r\n\r\n">>,
+ <<"HTTP/1.1 200 OK\r\n\r\n">>,
+ <<0:80, "\r\n\r\n">>,
+ <<"content-type: text/plain\r\ncontent-length: 12\r\n">>
+ ],
+ [{V, fun() -> {'EXIT', _} = (catch parse_headers(V)) end}
+ || V <- Tests].
+
+horse_parse_headers() ->
+ horse:repeat(50000,
+ parse_headers(<<"Server: Erlang/R17\r\n"
+ "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
+ "Multiline-Header: why hello!\r\n"
+ " I didn't see you all the way over there!\r\n"
+ "Content-Length: 12\r\n"
+ "Content-Type: text/plain\r\n"
+ "\r\nRest">>)
+ ).
+-endif.
+
+%% @doc Extract path and query string from a binary,
+%% removing any fragment component.
+
+-spec parse_fullpath(binary()) -> {binary(), binary()}.
+parse_fullpath(Fullpath) ->
+ parse_fullpath(Fullpath, <<>>).
+
+parse_fullpath(<<>>, Path) -> {Path, <<>>};
+parse_fullpath(<< $#, _/bits >>, Path) -> {Path, <<>>};
+parse_fullpath(<< $?, Qs/bits >>, Path) -> parse_fullpath_query(Qs, Path, <<>>);
+parse_fullpath(<< C, Rest/bits >>, SoFar) -> parse_fullpath(Rest, << SoFar/binary, C >>).
+
+parse_fullpath_query(<<>>, Path, Query) -> {Path, Query};
+parse_fullpath_query(<< $#, _/bits >>, Path, Query) -> {Path, Query};
+parse_fullpath_query(<< C, Rest/bits >>, Path, SoFar) ->
+ parse_fullpath_query(Rest, Path, << SoFar/binary, C >>).
+
+-ifdef(TEST).
+parse_fullpath_test() ->
+ {<<"*">>, <<>>} = parse_fullpath(<<"*">>),
+ {<<"/">>, <<>>} = parse_fullpath(<<"/">>),
+ {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource#fragment">>),
+ {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource">>),
+ {<<"/">>, <<>>} = parse_fullpath(<<"/?">>),
+ {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy#fragment">>),
+ {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy">>),
+ {<<"/path/to/resource">>, <<"q=cowboy">>}
+ = parse_fullpath(<<"/path/to/resource?q=cowboy">>),
+ ok.
+-endif.
+
+%% @doc Convert an HTTP version to atom.
+
+-spec parse_version(binary()) -> version().
+parse_version(<<"HTTP/1.1">>) -> 'HTTP/1.1';
+parse_version(<<"HTTP/1.0">>) -> 'HTTP/1.0'.
+
+-ifdef(TEST).
+parse_version_test() ->
+ 'HTTP/1.1' = parse_version(<<"HTTP/1.1">>),
+ 'HTTP/1.0' = parse_version(<<"HTTP/1.0">>),
+ {'EXIT', _} = (catch parse_version(<<"HTTP/1.2">>)),
+ ok.
+-endif.
+
+%% @doc Return formatted request-line and headers.
+%% @todo Add tests when the corresponding reverse functions are added.
+
+-spec request(binary(), iodata(), version(), cow_http:headers()) -> iodata().
+request(Method, Path, Version, Headers) ->
+ [Method, <<" ">>, Path, <<" ">>, version(Version), <<"\r\n">>,
+ [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers],
+ <<"\r\n">>].
+
+-spec response(cow_http:status() | binary(), version(), cow_http:headers())
+ -> iodata().
+response(Status, Version, Headers) ->
+ [version(Version), <<" ">>, status(Status), <<"\r\n">>,
+ headers(Headers), <<"\r\n">>].
+
+-spec headers(cow_http:headers()) -> iodata().
+headers(Headers) ->
+ [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers].
+
+%% @doc Return the version as a binary.
+
+-spec version(version()) -> binary().
+version('HTTP/1.1') -> <<"HTTP/1.1">>;
+version('HTTP/1.0') -> <<"HTTP/1.0">>.
+
+-ifdef(TEST).
+version_test() ->
+ <<"HTTP/1.1">> = version('HTTP/1.1'),
+ <<"HTTP/1.0">> = version('HTTP/1.0'),
+ {'EXIT', _} = (catch version('HTTP/1.2')),
+ ok.
+-endif.
+
+%% @doc Return the status code and string as binary.
+
+-spec status(cow_http:status() | binary()) -> binary().
+status(100) -> <<"100 Continue">>;
+status(101) -> <<"101 Switching Protocols">>;
+status(102) -> <<"102 Processing">>;
+status(103) -> <<"103 Early Hints">>;
+status(200) -> <<"200 OK">>;
+status(201) -> <<"201 Created">>;
+status(202) -> <<"202 Accepted">>;
+status(203) -> <<"203 Non-Authoritative Information">>;
+status(204) -> <<"204 No Content">>;
+status(205) -> <<"205 Reset Content">>;
+status(206) -> <<"206 Partial Content">>;
+status(207) -> <<"207 Multi-Status">>;
+status(208) -> <<"208 Already Reported">>;
+status(226) -> <<"226 IM Used">>;
+status(300) -> <<"300 Multiple Choices">>;
+status(301) -> <<"301 Moved Permanently">>;
+status(302) -> <<"302 Found">>;
+status(303) -> <<"303 See Other">>;
+status(304) -> <<"304 Not Modified">>;
+status(305) -> <<"305 Use Proxy">>;
+status(306) -> <<"306 Switch Proxy">>;
+status(307) -> <<"307 Temporary Redirect">>;
+status(308) -> <<"308 Permanent Redirect">>;
+status(400) -> <<"400 Bad Request">>;
+status(401) -> <<"401 Unauthorized">>;
+status(402) -> <<"402 Payment Required">>;
+status(403) -> <<"403 Forbidden">>;
+status(404) -> <<"404 Not Found">>;
+status(405) -> <<"405 Method Not Allowed">>;
+status(406) -> <<"406 Not Acceptable">>;
+status(407) -> <<"407 Proxy Authentication Required">>;
+status(408) -> <<"408 Request Timeout">>;
+status(409) -> <<"409 Conflict">>;
+status(410) -> <<"410 Gone">>;
+status(411) -> <<"411 Length Required">>;
+status(412) -> <<"412 Precondition Failed">>;
+status(413) -> <<"413 Request Entity Too Large">>;
+status(414) -> <<"414 Request-URI Too Long">>;
+status(415) -> <<"415 Unsupported Media Type">>;
+status(416) -> <<"416 Requested Range Not Satisfiable">>;
+status(417) -> <<"417 Expectation Failed">>;
+status(418) -> <<"418 I'm a teapot">>;
+status(421) -> <<"421 Misdirected Request">>;
+status(422) -> <<"422 Unprocessable Entity">>;
+status(423) -> <<"423 Locked">>;
+status(424) -> <<"424 Failed Dependency">>;
+status(425) -> <<"425 Unordered Collection">>;
+status(426) -> <<"426 Upgrade Required">>;
+status(428) -> <<"428 Precondition Required">>;
+status(429) -> <<"429 Too Many Requests">>;
+status(431) -> <<"431 Request Header Fields Too Large">>;
+status(451) -> <<"451 Unavailable For Legal Reasons">>;
+status(500) -> <<"500 Internal Server Error">>;
+status(501) -> <<"501 Not Implemented">>;
+status(502) -> <<"502 Bad Gateway">>;
+status(503) -> <<"503 Service Unavailable">>;
+status(504) -> <<"504 Gateway Timeout">>;
+status(505) -> <<"505 HTTP Version Not Supported">>;
+status(506) -> <<"506 Variant Also Negotiates">>;
+status(507) -> <<"507 Insufficient Storage">>;
+status(508) -> <<"508 Loop Detected">>;
+status(510) -> <<"510 Not Extended">>;
+status(511) -> <<"511 Network Authentication Required">>;
+status(B) when is_binary(B) -> B.
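The module above is essentially a verbatim relocation of the HTTP/1-specific functions from cow_http; only the specs now reference cow_http:status() and cow_http:headers(). A short usage sketch of the parsing path (buffer contents are illustrative):

Buffer = <<"GET /search?q=cowboy HTTP/1.1\r\n"
    "Host: example.org\r\n"
    "\r\n">>,
{<<"GET">>, Target, 'HTTP/1.1', Rest} = cow_http1:parse_request_line(Buffer),
{<<"/search">>, <<"q=cowboy">>} = cow_http1:parse_fullpath(Target),
{[{<<"host">>, <<"example.org">>}], <<>>} = cow_http1:parse_headers(Rest).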
diff --git a/src/cow_http2.erl b/src/cow_http2.erl
index 2925e37..68f3625 100644
--- a/src/cow_http2.erl
+++ b/src/cow_http2.erl
@@ -39,9 +39,6 @@
-type streamid() :: pos_integer().
-export_type([streamid/0]).
--type fin() :: fin | nofin.
--export_type([fin/0]).
-
-type head_fin() :: head_fin | head_nofin.
-export_type([head_fin/0]).
@@ -66,9 +63,10 @@
| unknown_error.
-export_type([error/0]).
--type frame() :: {data, streamid(), fin(), binary()}
- | {headers, streamid(), fin(), head_fin(), binary()}
- | {headers, streamid(), fin(), head_fin(), exclusive(), streamid(), weight(), binary()}
+-type frame() :: {data, streamid(), cow_http:fin(), binary()}
+ | {headers, streamid(), cow_http:fin(), head_fin(), binary()}
+ | {headers, streamid(), cow_http:fin(), head_fin(),
+ exclusive(), streamid(), weight(), binary()}
| {priority, streamid(), exclusive(), streamid(), weight()}
| {rst_stream, streamid(), error()}
| {settings, settings()}
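The fin() type moves from cow_http2 to cow_http so the HTTP/3 modules can share it; the atoms themselves (fin | nofin) are unchanged. A hypothetical caller sketch, showing that only specs need updating:

%% Illustrative only: code matching on frames keeps working as before,
%% only the spec changes from cow_http2:fin() to cow_http:fin().
-spec handle_data(cow_http2:streamid(), cow_http:fin(), binary()) -> more | done.
handle_data(_StreamID, nofin, _Data) -> more;
handle_data(_StreamID, fin, _Data) -> done.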
diff --git a/src/cow_http2_machine.erl b/src/cow_http2_machine.erl
index 69df267..808c6cf 100644
--- a/src/cow_http2_machine.erl
+++ b/src/cow_http2_machine.erl
@@ -76,19 +76,19 @@
method = undefined :: binary(),
%% Whether we finished sending data.
- local = idle :: idle | cow_http2:fin(),
+ local = idle :: idle | cow_http:fin(),
%% Local flow control window (how much we can send).
local_window :: integer(),
%% Buffered data waiting for the flow control window to increase.
local_buffer = queue:new() ::
- queue:queue({cow_http2:fin(), non_neg_integer(), {data, iodata()} | #sendfile{}}),
+ queue:queue({cow_http:fin(), non_neg_integer(), {data, iodata()} | #sendfile{}}),
local_buffer_size = 0 :: non_neg_integer(),
local_trailers = undefined :: undefined | cow_http:headers(),
%% Whether we finished receiving data.
- remote = idle :: idle | cow_http2:fin(),
+ remote = idle :: idle | cow_http:fin(),
%% Remote flow control window (how much we accept to receive).
remote_window :: integer(),
@@ -105,7 +105,7 @@
-type stream() :: #stream{}.
-type continued_frame() ::
- {headers, cow_http2:streamid(), cow_http2:fin(), cow_http2:head_fin(), binary()} |
+ {headers, cow_http2:streamid(), cow_http:fin(), cow_http2:head_fin(), binary()} |
{push_promise, cow_http2:streamid(), cow_http2:head_fin(), cow_http2:streamid(), binary()}.
-record(http2_machine, {
@@ -134,8 +134,9 @@
initial_window_size => 65535
% max_frame_size => 16384
% max_header_list_size => infinity
+% enable_connect_protocol => false
} :: map(),
- next_settings = undefined :: undefined | map(),
+ next_settings = #{} :: map(),
remote_settings = #{
initial_window_size => 65535
} :: map(),
@@ -171,20 +172,6 @@
-opaque http2_machine() :: #http2_machine{}.
-export_type([http2_machine/0]).
--type pseudo_headers() :: #{} %% Trailers
- | #{ %% Responses.
- status := cow_http:status()
- } | #{ %% Normal CONNECT requests.
- method := binary(),
- authority := binary()
- } | #{ %% Other requests and extended CONNECT requests.
- method := binary(),
- scheme := binary(),
- authority := binary(),
- path := binary(),
- protocol => binary()
- }.
-
%% Returns true when the given StreamID is for a local-initiated stream.
-define(IS_SERVER_LOCAL(StreamID), ((StreamID rem 2) =:= 0)).
-define(IS_CLIENT_LOCAL(StreamID), ((StreamID rem 2) =:= 1)).
@@ -292,15 +279,16 @@ init_upgrade_stream(Method, State=#http2_machine{mode=server, remote_streamid=0,
-spec frame(cow_http2:frame(), State)
-> {ok, State}
- | {ok, {data, cow_http2:streamid(), cow_http2:fin(), binary()}, State}
- | {ok, {headers, cow_http2:streamid(), cow_http2:fin(),
- cow_http:headers(), pseudo_headers(), non_neg_integer() | undefined}, State}
+ | {ok, {data, cow_http2:streamid(), cow_http:fin(), binary()}, State}
+ | {ok, {headers, cow_http2:streamid(), cow_http:fin(),
+ cow_http:headers(), cow_http:pseudo_headers(),
+ non_neg_integer() | undefined}, State}
| {ok, {trailers, cow_http2:streamid(), cow_http:headers()}, State}
| {ok, {rst_stream, cow_http2:streamid(), cow_http2:error()}, State}
| {ok, {push_promise, cow_http2:streamid(), cow_http2:streamid(),
- cow_http:headers(), pseudo_headers()}, State}
+ cow_http:headers(), cow_http:pseudo_headers()}, State}
| {ok, {goaway, cow_http2:streamid(), cow_http2:error(), binary()}, State}
- | {send, [{cow_http2:streamid(), cow_http2:fin(),
+ | {send, [{cow_http2:streamid(), cow_http:fin(),
[{data, iodata()} | #sendfile{} | {trailers, cow_http:headers()}]}], State}
| {error, {stream_error, cow_http2:streamid(), cow_http2:error(), atom()}, State}
| {error, {connection_error, cow_http2:error(), atom()}, State}
@@ -434,7 +422,7 @@ is_body_size_valid(_) ->
%% The order of the fields matter.
-record(headers, {
id :: cow_http2:streamid(),
- fin :: cow_http2:fin(),
+ fin :: cow_http:fin(),
head :: cow_http2:head_fin(),
data :: binary()
}).
@@ -444,8 +432,8 @@ headers_frame(Frame=#headers{}, State=#http2_machine{mode=Mode}) ->
server -> server_headers_frame(Frame, State);
client -> client_headers_frame(Frame, State)
end;
-%% @todo Handle the PRIORITY data, but only if this returns an ok tuple.
-%% @todo Do not lose the PRIORITY information if CONTINUATION frames follow.
+%% The PRIORITY mechanism is seen as flawed and deprecated.
+%% We will not implement it.
headers_frame({headers, StreamID, IsFin, IsHeadFin,
_IsExclusive, _DepStreamID, _Weight, HeaderData},
State=#http2_machine{mode=Mode}) ->
@@ -536,7 +524,7 @@ headers_decode(Frame=#headers{head=head_fin, data=HeaderData},
headers_enforce_concurrency_limit(Frame,
State#http2_machine{decode_state=DecodeState}, Type, Stream, Headers);
{Headers, DecodeState} ->
- headers_pseudo_headers(Frame,
+ headers_process(Frame,
State#http2_machine{decode_state=DecodeState}, Type, Stream, Headers)
catch _:_ ->
{error, {connection_error, compression_error,
@@ -552,239 +540,95 @@ headers_enforce_concurrency_limit(Frame=#headers{id=StreamID},
%% in the Streams variable yet and so we'll end up with +1 stream.
case map_size(Streams) < MaxConcurrentStreams of
true ->
- headers_pseudo_headers(Frame, State, Type, Stream, Headers);
+ headers_process(Frame, State, Type, Stream, Headers);
false ->
{error, {stream_error, StreamID, refused_stream,
'Maximum number of concurrent streams has been reached. (RFC7540 5.1.2)'},
State}
end.
-headers_pseudo_headers(Frame, State=#http2_machine{local_settings=LocalSettings},
- Type, Stream, Headers0) when Type =:= request; Type =:= push_promise ->
- IsExtendedConnectEnabled = maps:get(enable_connect_protocol, LocalSettings, false),
- case request_pseudo_headers(Headers0, #{}) of
- %% Extended CONNECT method (RFC8441).
- {ok, PseudoHeaders=#{method := <<"CONNECT">>, scheme := _,
- authority := _, path := _, protocol := _}, Headers}
- when IsExtendedConnectEnabled ->
- headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers);
- {ok, #{method := <<"CONNECT">>, scheme := _,
- authority := _, path := _}, _}
- when IsExtendedConnectEnabled ->
- headers_malformed(Frame, State,
- 'The :protocol pseudo-header MUST be sent with an extended CONNECT. (RFC8441 4)');
- {ok, #{protocol := _}, _} ->
- headers_malformed(Frame, State,
- 'The :protocol pseudo-header is only defined for the extended CONNECT. (RFC8441 4)');
- %% Normal CONNECT (no scheme/path).
- {ok, PseudoHeaders=#{method := <<"CONNECT">>, authority := _}, Headers}
- when map_size(PseudoHeaders) =:= 2 ->
- headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers);
- {ok, #{method := <<"CONNECT">>}, _} ->
- headers_malformed(Frame, State,
- 'CONNECT requests only use the :method and :authority pseudo-headers. (RFC7540 8.3)');
- %% Other requests.
- {ok, PseudoHeaders=#{method := _, scheme := _, path := _}, Headers} ->
- headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers);
- {ok, _, _} ->
- headers_malformed(Frame, State,
- 'A required pseudo-header was not found. (RFC7540 8.1.2.3)');
- {error, HumanReadable} ->
- headers_malformed(Frame, State, HumanReadable)
- end;
-headers_pseudo_headers(Frame=#headers{id=StreamID},
- State, Type=response, Stream, Headers0) ->
- case response_pseudo_headers(Headers0, #{}) of
- {ok, PseudoHeaders=#{status := _}, Headers} ->
- headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers);
- {ok, _, _} ->
- stream_reset(StreamID, State, protocol_error,
- 'A required pseudo-header was not found. (RFC7540 8.1.2.4)');
- {error, HumanReadable} ->
- stream_reset(StreamID, State, protocol_error, HumanReadable)
- end;
-headers_pseudo_headers(Frame=#headers{id=StreamID},
- State, Type=trailers, Stream, Headers) ->
- case trailers_contain_pseudo_headers(Headers) of
- false ->
- headers_regular_headers(Frame, State, Type, Stream, #{}, Headers);
- true ->
- stream_reset(StreamID, State, protocol_error,
- 'Trailer header blocks must not contain pseudo-headers. (RFC7540 8.1.2.1)')
+headers_process(Frame=#headers{id=StreamID, fin=IsFin},
+ State=#http2_machine{local_settings=LocalSettings},
+ Type, Stream, Headers0) ->
+ ReqMethod = case Stream of
+ #stream{method=ReqMethod0} -> ReqMethod0;
+ undefined -> undefined
+ end,
+ case cow_http:process_headers(Headers0, Type, ReqMethod, IsFin, LocalSettings) of
+ {headers, Headers, PseudoHeaders, Len} ->
+ headers_frame(Frame, State, Type, Stream, Headers, PseudoHeaders, Len);
+ {push_promise, Headers, PseudoHeaders} ->
+ push_promise_frame(Frame, State, Stream, Headers, PseudoHeaders);
+ {trailers, Headers} ->
+ trailers_frame(Frame, State, Stream, Headers);
+ {error, Reason} when Type =:= request ->
+ headers_malformed(Frame, State, format_error(Reason));
+ {error, Reason} ->
+ stream_reset(StreamID, State, protocol_error, format_error(Reason))
end.
headers_malformed(#headers{id=StreamID}, State, HumanReadable) ->
{error, {stream_error, StreamID, protocol_error, HumanReadable}, State}.
-request_pseudo_headers([{<<":method">>, _}|_], #{method := _}) ->
- {error, 'Multiple :method pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":method">>, Method}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{method => Method});
-request_pseudo_headers([{<<":scheme">>, _}|_], #{scheme := _}) ->
- {error, 'Multiple :scheme pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":scheme">>, Scheme}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{scheme => Scheme});
-request_pseudo_headers([{<<":authority">>, _}|_], #{authority := _}) ->
- {error, 'Multiple :authority pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":authority">>, Authority}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{authority => Authority});
-request_pseudo_headers([{<<":path">>, _}|_], #{path := _}) ->
- {error, 'Multiple :path pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":path">>, Path}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{path => Path});
-request_pseudo_headers([{<<":protocol">>, _}|_], #{protocol := _}) ->
- {error, 'Multiple :protocol pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":protocol">>, Protocol}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{protocol => Protocol});
-request_pseudo_headers([{<<":", _/bits>>, _}|_], _) ->
- {error, 'An unknown or invalid pseudo-header was found. (RFC7540 8.1.2.1)'};
-request_pseudo_headers(Headers, PseudoHeaders) ->
- {ok, PseudoHeaders, Headers}.
-
-response_pseudo_headers([{<<":status">>, _}|_], #{status := _}) ->
- {error, 'Multiple :status pseudo-headers were found. (RFC7540 8.1.2.3)'};
-response_pseudo_headers([{<<":status">>, Status}|Tail], PseudoHeaders) ->
- try cow_http:status_to_integer(Status) of
- IntStatus ->
- response_pseudo_headers(Tail, PseudoHeaders#{status => IntStatus})
- catch _:_ ->
- {error, 'The :status pseudo-header value is invalid. (RFC7540 8.1.2.4)'}
- end;
-response_pseudo_headers([{<<":", _/bits>>, _}|_], _) ->
- {error, 'An unknown or invalid pseudo-header was found. (RFC7540 8.1.2.1)'};
-response_pseudo_headers(Headers, PseudoHeaders) ->
- {ok, PseudoHeaders, Headers}.
-
-trailers_contain_pseudo_headers([]) ->
- false;
-trailers_contain_pseudo_headers([{<<":", _/bits>>, _}|_]) ->
- true;
-trailers_contain_pseudo_headers([_|Tail]) ->
- trailers_contain_pseudo_headers(Tail).
-
-%% Rejecting invalid regular headers might be a bit too strong for clients.
-headers_regular_headers(Frame=#headers{id=StreamID},
- State, Type, Stream, PseudoHeaders, Headers) ->
- case regular_headers(Headers, Type) of
- ok when Type =:= request ->
- request_expected_size(Frame, State, Type, Stream, PseudoHeaders, Headers);
- ok when Type =:= push_promise ->
- push_promise_frame(Frame, State, Stream, PseudoHeaders, Headers);
- ok when Type =:= response ->
- response_expected_size(Frame, State, Type, Stream, PseudoHeaders, Headers);
- ok when Type =:= trailers ->
- trailers_frame(Frame, State, Stream, Headers);
- {error, HumanReadable} when Type =:= request ->
- headers_malformed(Frame, State, HumanReadable);
- {error, HumanReadable} ->
- stream_reset(StreamID, State, protocol_error, HumanReadable)
- end.
-
-regular_headers([{<<>>, _}|_], _) ->
- {error, 'Empty header names are not valid regular headers. (CVE-2019-9516)'};
-regular_headers([{<<":", _/bits>>, _}|_], _) ->
- {error, 'Pseudo-headers were found after regular headers. (RFC7540 8.1.2.1)'};
-regular_headers([{<<"connection">>, _}|_], _) ->
- {error, 'The connection header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"keep-alive">>, _}|_], _) ->
- {error, 'The keep-alive header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"proxy-authenticate">>, _}|_], _) ->
- {error, 'The proxy-authenticate header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"proxy-authorization">>, _}|_], _) ->
- {error, 'The proxy-authorization header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"transfer-encoding">>, _}|_], _) ->
- {error, 'The transfer-encoding header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"upgrade">>, _}|_], _) ->
- {error, 'The upgrade header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"te">>, Value}|_], request) when Value =/= <<"trailers">> ->
- {error, 'The te header with a value other than "trailers" is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"te">>, _}|_], Type) when Type =/= request ->
- {error, 'The te header is only allowed in request headers. (RFC7540 8.1.2.2)'};
-regular_headers([{Name, _}|Tail], Type) ->
- Pattern = [
- <<$A>>, <<$B>>, <<$C>>, <<$D>>, <<$E>>, <<$F>>, <<$G>>, <<$H>>, <<$I>>,
- <<$J>>, <<$K>>, <<$L>>, <<$M>>, <<$N>>, <<$O>>, <<$P>>, <<$Q>>, <<$R>>,
- <<$S>>, <<$T>>, <<$U>>, <<$V>>, <<$W>>, <<$X>>, <<$Y>>, <<$Z>>
- ],
- case binary:match(Name, Pattern) of
- nomatch -> regular_headers(Tail, Type);
- _ -> {error, 'Header names must be lowercase. (RFC7540 8.1.2)'}
- end;
-regular_headers([], _) ->
- ok.
-
-request_expected_size(Frame=#headers{fin=IsFin}, State, Type, Stream, PseudoHeaders, Headers) ->
- case [CL || {<<"content-length">>, CL} <- Headers] of
- [] when IsFin =:= fin ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [] ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, undefined);
- [<<"0">>] when IsFin =:= fin ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [_] when IsFin =:= fin ->
- headers_malformed(Frame, State,
- 'HEADERS frame with the END_STREAM flag contains a non-zero content-length. (RFC7540 8.1.2.6)');
- [BinLen] ->
- headers_parse_expected_size(Frame, State, Type, Stream,
- PseudoHeaders, Headers, BinLen);
- _ ->
- headers_malformed(Frame, State,
- 'Multiple content-length headers were received. (RFC7230 3.3.2)')
- end.
-
-response_expected_size(Frame=#headers{id=StreamID, fin=IsFin}, State, Type,
- Stream=#stream{method=Method}, PseudoHeaders=#{status := Status}, Headers) ->
- case [CL || {<<"content-length">>, CL} <- Headers] of
- [] when IsFin =:= fin ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [] ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, undefined);
- [_] when Status >= 100, Status =< 199 ->
- stream_reset(StreamID, State, protocol_error,
- 'Content-length header received in a 1xx response. (RFC7230 3.3.2)');
- [_] when Status =:= 204 ->
- stream_reset(StreamID, State, protocol_error,
- 'Content-length header received in a 204 response. (RFC7230 3.3.2)');
- [_] when Status >= 200, Status =< 299, Method =:= <<"CONNECT">> ->
- stream_reset(StreamID, State, protocol_error,
- 'Content-length header received in a 2xx response to a CONNECT request. (RFC7230 3.3.2).');
- %% Responses to HEAD requests, and 304 responses may contain
- %% a content-length header that must be ignored. (RFC7230 3.3.2)
- [_] when Method =:= <<"HEAD">> ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [_] when Status =:= 304 ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [<<"0">>] when IsFin =:= fin ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [_] when IsFin =:= fin ->
- stream_reset(StreamID, State, protocol_error,
- 'HEADERS frame with the END_STREAM flag contains a non-zero content-length. (RFC7540 8.1.2.6)');
- [BinLen] ->
- headers_parse_expected_size(Frame, State, Type, Stream,
- PseudoHeaders, Headers, BinLen);
- _ ->
- stream_reset(StreamID, State, protocol_error,
- 'Multiple content-length headers were received. (RFC7230 3.3.2)')
- end.
-
-headers_parse_expected_size(Frame=#headers{id=StreamID},
- State, Type, Stream, PseudoHeaders, Headers, BinLen) ->
- try cow_http_hd:parse_content_length(BinLen) of
- Len ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, Len)
- catch
- _:_ ->
- HumanReadable = 'The content-length header is invalid. (RFC7230 3.3.2)',
- case Type of
- request -> headers_malformed(Frame, State, HumanReadable);
- response -> stream_reset(StreamID, State, protocol_error, HumanReadable)
- end
- end.
+format_error(connect_invalid_pseudo_header) ->
+ 'CONNECT requests only use the :method and :authority pseudo-headers. (RFC7540 8.3)';
+format_error(connect_missing_authority) ->
+ 'CONNECT requests must include the :authority pseudo-header. (RFC7540 8.3)';
+format_error(empty_header_name) ->
+ 'Empty header names are not valid regular headers. (CVE-2019-9516)';
+format_error(extended_connect_missing_protocol) ->
+ 'The :protocol pseudo-header MUST be sent with an extended CONNECT. (RFC8441 4)';
+format_error(invalid_connection_header) ->
+ 'The connection header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_keep_alive_header) ->
+ 'The keep-alive header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_protocol_pseudo_header) ->
+ 'The :protocol pseudo-header is only defined for the extended CONNECT. (RFC8441 4)';
+format_error(invalid_proxy_authenticate_header) ->
+ 'The proxy-authenticate header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_proxy_authorization_header) ->
+ 'The proxy-authorization header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_pseudo_header) ->
+ 'An unknown or invalid pseudo-header was found. (RFC7540 8.1.2.1)';
+format_error(invalid_status_pseudo_header) ->
+ 'The :status pseudo-header value is invalid. (RFC7540 8.1.2.4)';
+format_error(invalid_te_header) ->
+ 'The te header is only allowed in request headers. (RFC7540 8.1.2.2)';
+format_error(invalid_te_value) ->
+ 'The te header with a value other than "trailers" is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_transfer_encoding_header) ->
+ 'The transfer-encoding header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_upgrade_header) ->
+ 'The upgrade header is not allowed. (RFC7540 8.1.2.2)';
+format_error(missing_pseudo_header) ->
+ 'A required pseudo-header was not found. (RFC7540 8.1.2.3, RFC7540 8.1.2.4)';
+format_error(multiple_authority_pseudo_headers) ->
+ 'Multiple :authority pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_method_pseudo_headers) ->
+ 'Multiple :method pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_path_pseudo_headers) ->
+ 'Multiple :path pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_protocol_pseudo_headers) ->
+ 'Multiple :protocol pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_scheme_pseudo_headers) ->
+ 'Multiple :scheme pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_status_pseudo_headers) ->
+ 'Multiple :status pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(non_zero_length_with_fin_flag) ->
+ 'HEADERS frame with the END_STREAM flag contains a non-zero content-length. (RFC7540 8.1.2.6)';
+format_error(pseudo_header_after_regular) ->
+ 'Pseudo-headers were found after regular headers. (RFC7540 8.1.2.1)';
+format_error(trailer_invalid_pseudo_header) ->
+ 'Trailer header blocks must not contain pseudo-headers. (RFC7540 8.1.2.1)';
+format_error(uppercase_header_name) ->
+ 'Header names must be lowercase. (RFC7540 8.1.2)';
+format_error(Reason) ->
+ cow_http:format_semantic_error(Reason).
headers_frame(#headers{id=StreamID, fin=IsFin}, State0=#http2_machine{
local_settings=#{initial_window_size := RemoteWindow},
remote_settings=#{initial_window_size := LocalWindow}},
- Type, Stream0, PseudoHeaders, Headers, Len) ->
+ Type, Stream0, Headers, PseudoHeaders, Len) ->
{Stream, State1} = case Type of
request ->
TE = case lists:keyfind(<<"te">>, 1, Headers) of
@@ -818,7 +662,8 @@ trailers_frame(#headers{id=StreamID}, State0, Stream0, Headers) ->
%% PRIORITY frame.
%%
-%% @todo Handle PRIORITY frames.
+%% The PRIORITY mechanism is seen as flawed and deprecated.
+%% We will not implement it.
priority_frame(_Frame, State) ->
{ok, State}.
@@ -967,7 +812,7 @@ push_promise_frame(#headers{id=PromisedStreamID},
State0=#http2_machine{
local_settings=#{initial_window_size := RemoteWindow},
remote_settings=#{initial_window_size := LocalWindow}},
- #stream{id=StreamID}, PseudoHeaders=#{method := Method}, Headers) ->
+ #stream{id=StreamID}, Headers, PseudoHeaders=#{method := Method}) ->
TE = case lists:keyfind(<<"te">>, 1, Headers) of
{_, TE0} -> TE0;
false -> undefined
@@ -1141,9 +986,9 @@ timeout(_, _, State) ->
%% this module does not send data directly, instead it returns
%% a value that can then be used to send the frames.
--spec prepare_headers(cow_http2:streamid(), State, idle | cow_http2:fin(),
- pseudo_headers(), cow_http:headers())
- -> {ok, cow_http2:fin(), iodata(), State} when State::http2_machine().
+-spec prepare_headers(cow_http2:streamid(), State, idle | cow_http:fin(),
+ cow_http:pseudo_headers(), cow_http:headers())
+ -> {ok, cow_http:fin(), iodata(), State} when State::http2_machine().
prepare_headers(StreamID, State=#http2_machine{encode_state=EncodeState0},
IsFin0, PseudoHeaders, Headers0) ->
Stream = #stream{method=Method, local=idle} = stream_get(StreamID, State),
@@ -1152,12 +997,14 @@ prepare_headers(StreamID, State=#http2_machine{encode_state=EncodeState0},
{_, <<"HEAD">>} -> fin;
_ -> IsFin0
end,
- Headers = merge_pseudo_headers(PseudoHeaders, remove_http11_headers(Headers0)),
+ Headers = cow_http:merge_pseudo_headers(PseudoHeaders,
+ cow_http:remove_http1_headers(Headers0)),
{HeaderBlock, EncodeState} = cow_hpack:encode(Headers, EncodeState0),
{ok, IsFin, HeaderBlock, stream_store(Stream#stream{local=IsFin0},
State#http2_machine{encode_state=EncodeState})}.
--spec prepare_push_promise(cow_http2:streamid(), State, pseudo_headers(), cow_http:headers())
+-spec prepare_push_promise(cow_http2:streamid(), State,
+ cow_http:pseudo_headers(), cow_http:headers())
-> {ok, cow_http2:streamid(), iodata(), State}
| {error, no_push} when State::http2_machine().
prepare_push_promise(_, #http2_machine{remote_settings=#{enable_push := false}}, _, _) ->
@@ -1171,7 +1018,8 @@ prepare_push_promise(StreamID, State=#http2_machine{encode_state=EncodeState0,
{_, TE0} -> TE0;
false -> undefined
end,
- Headers = merge_pseudo_headers(PseudoHeaders, remove_http11_headers(Headers0)),
+ Headers = cow_http:merge_pseudo_headers(PseudoHeaders,
+ cow_http:remove_http1_headers(Headers0)),
{HeaderBlock, EncodeState} = cow_hpack:encode(Headers, EncodeState0),
{ok, LocalStreamID, HeaderBlock, stream_store(
#stream{id=LocalStreamID, method=maps:get(method, PseudoHeaders),
@@ -1179,34 +1027,6 @@ prepare_push_promise(StreamID, State=#http2_machine{encode_state=EncodeState0,
local_window=LocalWindow, remote_window=RemoteWindow, te=TE},
State#http2_machine{encode_state=EncodeState, local_streamid=LocalStreamID + 2})}.
-remove_http11_headers(Headers) ->
- RemoveHeaders0 = [
- <<"keep-alive">>,
- <<"proxy-connection">>,
- <<"transfer-encoding">>,
- <<"upgrade">>
- ],
- RemoveHeaders = case lists:keyfind(<<"connection">>, 1, Headers) of
- false ->
- RemoveHeaders0;
- {_, ConnHd} ->
- %% We do not need to worry about any "close" header because
- %% that header name is reserved.
- Connection = cow_http_hd:parse_connection(ConnHd),
- Connection ++ [<<"connection">>|RemoveHeaders0]
- end,
- lists:filter(fun({Name, _}) ->
- not lists:member(Name, RemoveHeaders)
- end, Headers).
-
-merge_pseudo_headers(PseudoHeaders, Headers0) ->
- lists:foldl(fun
- ({status, Status}, Acc) when is_integer(Status) ->
- [{<<":status">>, integer_to_binary(Status)}|Acc];
- ({Name, Value}, Acc) ->
- [{iolist_to_binary([$:, atom_to_binary(Name, latin1)]), Value}|Acc]
- end, Headers0, maps:to_list(PseudoHeaders)).
-
-spec prepare_trailers(cow_http2:streamid(), State, cow_http:headers())
-> {ok, iodata(), State} when State::http2_machine().
prepare_trailers(StreamID, State=#http2_machine{encode_state=EncodeState0}, Trailers) ->
@@ -1215,9 +1035,9 @@ prepare_trailers(StreamID, State=#http2_machine{encode_state=EncodeState0}, Trai
{ok, HeaderBlock, stream_store(Stream#stream{local=fin},
State#http2_machine{encode_state=EncodeState})}.
--spec send_or_queue_data(cow_http2:streamid(), State, cow_http2:fin(), DataOrFileOrTrailers)
+-spec send_or_queue_data(cow_http2:streamid(), State, cow_http:fin(), DataOrFileOrTrailers)
-> {ok, State}
- | {send, [{cow_http2:streamid(), cow_http2:fin(), [DataOrFileOrTrailers]}], State}
+ | {send, [{cow_http2:streamid(), cow_http:fin(), [DataOrFileOrTrailers]}], State}
when State::http2_machine(), DataOrFileOrTrailers::
{data, iodata()} | #sendfile{} | {trailers, cow_http:headers()}.
send_or_queue_data(StreamID, State0=#http2_machine{opts=Opts, local_window=ConnWindow},
@@ -1272,8 +1092,8 @@ send_or_queue_data(StreamID, State0=#http2_machine{opts=Opts, local_window=ConnW
%% Internal data sending/queuing functions.
-%% @todo Should we ever want to implement the PRIORITY mechanism,
-%% this would be the place to do it. Right now, we just go over
+%% The PRIORITY mechanism is seen as flawed and deprecated.
+%% We will not implement it. So we just go over
%% all streams and send what we can until either everything is
%% sent or we run out of space in the window.
send_data(State0=#http2_machine{streams=Streams0}) ->
@@ -1607,7 +1427,7 @@ get_stream_local_buffer_size(StreamID, State=#http2_machine{mode=Mode,
%% Retrieve the local state for a stream, including the state in the queue.
-spec get_stream_local_state(cow_http2:streamid(), http2_machine())
- -> {ok, idle | cow_http2:fin(), empty | nofin | fin} | {error, not_found | closed}.
+ -> {ok, idle | cow_http:fin(), empty | nofin | fin} | {error, not_found | closed}.
get_stream_local_state(StreamID, State=#http2_machine{mode=Mode,
local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) ->
case stream_get(StreamID, State) of
@@ -1630,7 +1450,7 @@ get_stream_local_state(StreamID, State=#http2_machine{mode=Mode,
%% Retrieve the remote state for a stream.
-spec get_stream_remote_state(cow_http2:streamid(), http2_machine())
- -> {ok, idle | cow_http2:fin()} | {error, not_found | closed}.
+ -> {ok, idle | cow_http:fin()} | {error, not_found | closed}.
get_stream_remote_state(StreamID, State=#http2_machine{mode=Mode,
local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) ->
case stream_get(StreamID, State) of
diff --git a/src/cow_http3.erl b/src/cow_http3.erl
new file mode 100644
index 0000000..d3776ec
--- /dev/null
+++ b/src/cow_http3.erl
@@ -0,0 +1,458 @@
+%% Copyright (c) 2023-2024, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http3).
+
+%% Parsing.
+-export([parse/1]).
+-export([parse_unidi_stream_header/1]).
+-export([code_to_error/1]).
+
+%% Building.
+-export([data/1]).
+-export([headers/1]).
+-export([settings/1]).
+-export([error_to_code/1]).
+-export([encode_int/1]).
+
+-type stream_id() :: non_neg_integer().
+-export_type([stream_id/0]).
+
+-type push_id() :: non_neg_integer().
+-export_type([push_id/0]).
+
+-type settings() :: #{
+ qpack_max_table_capacity => 0..16#3fffffffffffffff,
+ max_field_section_size => 0..16#3fffffffffffffff,
+ qpack_blocked_streams => 0..16#3fffffffffffffff,
+ enable_connect_protocol => boolean()
+}.
+-export_type([settings/0]).
+
+-type error() :: h3_no_error
+ | h3_general_protocol_error
+ | h3_internal_error
+ | h3_stream_creation_error
+ | h3_closed_critical_stream
+ | h3_frame_unexpected
+ | h3_frame_error
+ | h3_excessive_load
+ | h3_id_error
+ | h3_settings_error
+ | h3_missing_settings
+ | h3_request_rejected
+ | h3_request_cancelled
+ | h3_request_incomplete
+ | h3_message_error
+ | h3_connect_error
+ | h3_version_fallback.
+-export_type([error/0]).
+
+-type frame() :: {data, binary()}
+ | {headers, binary()}
+ | {cancel_push, push_id()}
+ | {settings, settings()}
+ | {push_promise, push_id(), binary()}
+ | {goaway, stream_id() | push_id()}
+ | {max_push_id, push_id()}.
+-export_type([frame/0]).
+
+%% Parsing.
+
+-spec parse(binary())
+ -> {ok, frame(), binary()}
+ | {more, {data, binary()} | ignore, non_neg_integer()}
+ | {ignore, binary()}
+ | {connection_error, h3_frame_error | h3_frame_unexpected | h3_settings_error, atom()}
+ | more.
+
+%%
+%% DATA frames.
+%%
+parse(<<0, 0:2, Len:6, Data:Len/binary, Rest/bits>>) ->
+ {ok, {data, Data}, Rest};
+parse(<<0, 1:2, Len:14, Data:Len/binary, Rest/bits>>) ->
+ {ok, {data, Data}, Rest};
+parse(<<0, 2:2, Len:30, Data:Len/binary, Rest/bits>>) ->
+ {ok, {data, Data}, Rest};
+parse(<<0, 3:2, Len:62, Data:Len/binary, Rest/bits>>) ->
+ {ok, {data, Data}, Rest};
+%% DATA frames may be split over multiple QUIC packets
+%% but we want to process them immediately rather than
+%% risk buffering a very large payload.
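+%% A hedged example with hypothetical values: a DATA frame whose one-byte
+%% length announces 10 bytes of payload, with only 4 bytes received so far,
+%% parses to a {more, ...} tuple whose last element is the number of
+%% missing bytes:
+%%   cow_http3:parse(<<0, 0:2, 10:6, "abcd">>)
+%%     -> {more, {data, <<"abcd">>}, 6}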
+parse(<<0, 0:2, Len:6, Data/bits>>) when byte_size(Data) < Len ->
+ {more, {data, Data}, Len - byte_size(Data)};
+parse(<<0, 1:2, Len:14, Data/bits>>) when byte_size(Data) < Len ->
+ {more, {data, Data}, Len - byte_size(Data)};
+parse(<<0, 2:2, Len:30, Data/bits>>) when byte_size(Data) < Len ->
+ {more, {data, Data}, Len - byte_size(Data)};
+parse(<<0, 3:2, Len:62, Data/bits>>) when byte_size(Data) < Len ->
+ {more, {data, Data}, Len - byte_size(Data)};
+%%
+%% HEADERS frames.
+%%
+parse(<<1, 0:2, 0:6, _/bits>>) ->
+ {connection_error, h3_frame_error,
+ 'HEADERS frames payload CANNOT be 0 bytes wide. (RFC9114 7.1, RFC9114 7.2.2)'};
+parse(<<1, 1:2, 0:14, _/bits>>) ->
+ {connection_error, h3_frame_error,
+ 'HEADERS frames payload CANNOT be 0 bytes wide. (RFC9114 7.1, RFC9114 7.2.2)'};
+parse(<<1, 2:2, 0:30, _/bits>>) ->
+ {connection_error, h3_frame_error,
+ 'HEADERS frames payload CANNOT be 0 bytes wide. (RFC9114 7.1, RFC9114 7.2.2)'};
+parse(<<1, 3:2, 0:62, _/bits>>) ->
+ {connection_error, h3_frame_error,
+ 'HEADERS frames payload CANNOT be 0 bytes wide. (RFC9114 7.1, RFC9114 7.2.2)'};
+parse(<<1, 0:2, Len:6, EncodedFieldSection:Len/binary, Rest/bits>>) ->
+ {ok, {headers, EncodedFieldSection}, Rest};
+parse(<<1, 1:2, Len:14, EncodedFieldSection:Len/binary, Rest/bits>>) ->
+ {ok, {headers, EncodedFieldSection}, Rest};
+parse(<<1, 2:2, Len:30, EncodedFieldSection:Len/binary, Rest/bits>>) ->
+ {ok, {headers, EncodedFieldSection}, Rest};
+parse(<<1, 3:2, Len:62, EncodedFieldSection:Len/binary, Rest/bits>>) ->
+ {ok, {headers, EncodedFieldSection}, Rest};
+%%
+%% CANCEL_PUSH frames.
+%%
+parse(<<3, 0:2, 1:6, 0:2, PushID:6, Rest/bits>>) ->
+ {ok, {cancel_push, PushID}, Rest};
+parse(<<3, 0:2, 2:6, 1:2, PushID:14, Rest/bits>>) ->
+ {ok, {cancel_push, PushID}, Rest};
+parse(<<3, 0:2, 4:6, 2:2, PushID:30, Rest/bits>>) ->
+ {ok, {cancel_push, PushID}, Rest};
+parse(<<3, 0:2, 8:6, 3:2, PushID:62, Rest/bits>>) ->
+ {ok, {cancel_push, PushID}, Rest};
+parse(<<3, _/bits>>) ->
+ {connection_error, h3_frame_error,
+ 'CANCEL_PUSH frames payload MUST be 1, 2, 4 or 8 bytes wide. (RFC9114 7.1, RFC9114 7.2.3)'};
+%%
+%% SETTINGS frames.
+%%
+parse(<<4, 0:2, Len:6, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_settings_id(Rest, Len, #{});
+parse(<<4, 1:2, Len:14, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_settings_id(Rest, Len, #{});
+parse(<<4, 2:2, Len:30, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_settings_id(Rest, Len, #{});
+parse(<<4, 3:2, Len:62, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_settings_id(Rest, Len, #{});
+%%
+%% PUSH_PROMISE frames.
+%%
+parse(<<5, 0:2, Len:6, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_push_promise(Rest, Len);
+parse(<<5, 1:2, Len:14, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_push_promise(Rest, Len);
+parse(<<5, 2:2, Len:30, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_push_promise(Rest, Len);
+parse(<<5, 3:2, Len:62, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_push_promise(Rest, Len);
+%%
+%% GOAWAY frames.
+%%
+parse(<<7, 0:2, 1:6, 0:2, StreamOrPushID:6, Rest/bits>>) ->
+ {ok, {goaway, StreamOrPushID}, Rest};
+parse(<<7, 0:2, 2:6, 1:2, StreamOrPushID:14, Rest/bits>>) ->
+ {ok, {goaway, StreamOrPushID}, Rest};
+parse(<<7, 0:2, 4:6, 2:2, StreamOrPushID:30, Rest/bits>>) ->
+ {ok, {goaway, StreamOrPushID}, Rest};
+parse(<<7, 0:2, 8:6, 3:2, StreamOrPushID:62, Rest/bits>>) ->
+ {ok, {goaway, StreamOrPushID}, Rest};
+parse(<<7, 0:2, N:6, _/bits>>) when N =:= 1; N =:= 2; N =:= 4; N =:= 8 ->
+ more;
+parse(<<7, _/bits>>) ->
+ {connection_error, h3_frame_error,
+ 'GOAWAY frames payload MUST be 1, 2, 4 or 8 bytes wide. (RFC9114 7.1, RFC9114 7.2.6)'};
+%%
+%% MAX_PUSH_ID frames.
+%%
+parse(<<13, 0:2, 1:6, 0:2, PushID:6, Rest/bits>>) ->
+ {ok, {max_push_id, PushID}, Rest};
+parse(<<13, 0:2, 2:6, 1:2, PushID:14, Rest/bits>>) ->
+ {ok, {max_push_id, PushID}, Rest};
+parse(<<13, 0:2, 4:6, 2:2, PushID:30, Rest/bits>>) ->
+ {ok, {max_push_id, PushID}, Rest};
+parse(<<13, 0:2, 8:6, 3:2, PushID:62, Rest/bits>>) ->
+ {ok, {max_push_id, PushID}, Rest};
+parse(<<13, 0:2, N:6, _/bits>>) when N =:= 1; N =:= 2; N =:= 4; N =:= 8 ->
+ more;
+parse(<<13, _/bits>>) ->
+ {connection_error, h3_frame_error,
+ 'MAX_PUSH_ID frames payload MUST be 1, 2, 4 or 8 bytes wide. (RFC9114 7.1, RFC9114 7.2.7)'};
+%%
+%% HTTP/2 frame types must be rejected.
+%%
+parse(<<2, _/bits>>) ->
+ {connection_error, h3_frame_unexpected,
+ 'HTTP/2 PRIORITY frame not defined for HTTP/3 must be rejected. (RFC9114 7.2.8)'};
+parse(<<6, _/bits>>) ->
+ {connection_error, h3_frame_unexpected,
+ 'HTTP/2 PING frame not defined for HTTP/3 must be rejected. (RFC9114 7.2.8)'};
+parse(<<8, _/bits>>) ->
+ {connection_error, h3_frame_unexpected,
+ 'HTTP/2 WINDOW_UPDATE frame not defined for HTTP/3 must be rejected. (RFC9114 7.2.8)'};
+parse(<<9, _/bits>>) ->
+ {connection_error, h3_frame_unexpected,
+ 'HTTP/2 CONTINUATION frame not defined for HTTP/3 must be rejected. (RFC9114 7.2.8)'};
+%%
+%% Unknown frames must be ignored.
+parse(<<0:2, Type:6, 0:2, Len:6, Rest/bits>>)
+ when Type =:= 10; Type =:= 11; Type =:= 12; Type > 13 ->
+ parse_ignore(Rest, Len);
+parse(<<0:2, Type:6, 1:2, Len:14, Rest/bits>>)
+ when Type =:= 10; Type =:= 11; Type =:= 12; Type > 13 ->
+ parse_ignore(Rest, Len);
+parse(<<0:2, Type:6, 2:2, Len:30, Rest/bits>>)
+ when Type =:= 10; Type =:= 11; Type =:= 12; Type > 13 ->
+ parse_ignore(Rest, Len);
+parse(<<0:2, Type:6, 3:2, Len:62, Rest/bits>>)
+ when Type =:= 10; Type =:= 11; Type =:= 12; Type > 13 ->
+ parse_ignore(Rest, Len);
+parse(<<1:2, _:14, 0:2, Len:6, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<1:2, _:14, 1:2, Len:14, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<1:2, _:14, 2:2, Len:30, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<1:2, _:14, 3:2, Len:62, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<2:2, _:30, 0:2, Len:6, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<2:2, _:30, 1:2, Len:14, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<2:2, _:30, 2:2, Len:30, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<2:2, _:30, 3:2, Len:62, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<3:2, _:62, 0:2, Len:6, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<3:2, _:62, 1:2, Len:14, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<3:2, _:62, 2:2, Len:30, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<3:2, _:62, 3:2, Len:62, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+%%
+%% Incomplete frames for those we fully process only.
+%%
+parse(_) ->
+ more.
+
+parse_settings_id(Rest, 0, Settings) ->
+ {ok, {settings, Settings}, Rest};
+parse_settings_id(<<0:2, Identifier:6, Rest/bits>>, Len, Settings) when Len >= 1 ->
+ parse_settings_val(Rest, Len - 1, Settings, Identifier);
+parse_settings_id(<<1:2, Identifier:14, Rest/bits>>, Len, Settings) when Len >= 2 ->
+ parse_settings_val(Rest, Len - 2, Settings, Identifier);
+parse_settings_id(<<2:2, Identifier:30, Rest/bits>>, Len, Settings) when Len >= 4 ->
+ parse_settings_val(Rest, Len - 4, Settings, Identifier);
+parse_settings_id(<<3:2, Identifier:62, Rest/bits>>, Len, Settings) when Len >= 8 ->
+ parse_settings_val(Rest, Len - 8, Settings, Identifier);
+parse_settings_id(_, _, _) ->
+ {connection_error, h3_frame_error,
+ 'SETTINGS payload size exceeds the length given. (RFC9114 7.1, RFC9114 7.2.4)'}.
+
+parse_settings_val(<<0:2, Value:6, Rest/bits>>, Len, Settings, Identifier) when Len >= 1 ->
+ parse_settings_id_val(Rest, Len - 1, Settings, Identifier, Value);
+parse_settings_val(<<1:2, Value:14, Rest/bits>>, Len, Settings, Identifier) when Len >= 2 ->
+ parse_settings_id_val(Rest, Len - 2, Settings, Identifier, Value);
+parse_settings_val(<<2:2, Value:30, Rest/bits>>, Len, Settings, Identifier) when Len >= 4 ->
+ parse_settings_id_val(Rest, Len - 4, Settings, Identifier, Value);
+parse_settings_val(<<3:2, Value:62, Rest/bits>>, Len, Settings, Identifier) when Len >= 8 ->
+ parse_settings_id_val(Rest, Len - 8, Settings, Identifier, Value);
+parse_settings_val(_, _, _, _) ->
+ {connection_error, h3_frame_error,
+ 'SETTINGS payload size exceeds the length given. (RFC9114 7.1, RFC9114 7.2.4)'}.
+
+parse_settings_id_val(Rest, Len, Settings, Identifier, Value) ->
+ case Identifier of
+ %% SETTINGS_QPACK_MAX_TABLE_CAPACITY (RFC9204).
+ 1 ->
+ parse_settings_key_val(Rest, Len, Settings, qpack_max_table_capacity, Value);
+ %% SETTINGS_MAX_FIELD_SECTION_SIZE (RFC9114).
+ 6 ->
+ parse_settings_key_val(Rest, Len, Settings, max_field_section_size, Value);
+ %% SETTINGS_QPACK_BLOCKED_STREAMS (RFC9204).
+ 7 ->
+ parse_settings_key_val(Rest, Len, Settings, qpack_blocked_streams, Value);
+ %% SETTINGS_ENABLE_CONNECT_PROTOCOL (RFC9220).
+ 8 when Value =:= 0 ->
+ parse_settings_key_val(Rest, Len, Settings, enable_connect_protocol, false);
+ 8 when Value =:= 1 ->
+ parse_settings_key_val(Rest, Len, Settings, enable_connect_protocol, true);
+ 8 ->
+ {connection_error, h3_settings_error,
+ 'The SETTINGS_ENABLE_CONNECT_PROTOCOL value MUST be 0 or 1. (RFC9220 3, RFC8441 3)'};
+ _ when Identifier < 6 ->
+ {connection_error, h3_settings_error,
+ 'HTTP/2 setting not defined for HTTP/3 must be rejected. (RFC9114 7.2.4.1)'};
+ %% Unknown settings must be ignored.
+ _ ->
+ parse_settings_id(Rest, Len, Settings)
+ end.
+
+parse_settings_key_val(Rest, Len, Settings, Key, Value) ->
+ case Settings of
+ #{Key := _} ->
+ {connection_error, h3_settings_error,
+ 'A duplicate setting identifier was found. (RFC9114 7.2.4)'};
+ _ ->
+ parse_settings_id(Rest, Len, Settings#{Key => Value})
+ end.
+
+parse_push_promise(<<0:2, PushID:6, Data/bits>>, Len) ->
+ <<EncodedFieldSection:(Len - 1)/bytes, Rest/bits>> = Data,
+ {ok, {push_promise, PushID, EncodedFieldSection}, Rest};
+parse_push_promise(<<1:2, PushID:14, Data/bits>>, Len) ->
+ <<EncodedFieldSection:(Len - 2)/bytes, Rest/bits>> = Data,
+ {ok, {push_promise, PushID, EncodedFieldSection}, Rest};
+parse_push_promise(<<2:2, PushID:30, Data/bits>>, Len) ->
+ <<EncodedFieldSection:(Len - 4)/bytes, Rest/bits>> = Data,
+ {ok, {push_promise, PushID, EncodedFieldSection}, Rest};
+parse_push_promise(<<3:2, PushID:62, Data/bits>>, Len) ->
+ <<EncodedFieldSection:(Len - 8)/bytes, Rest/bits>> = Data,
+ {ok, {push_promise, PushID, EncodedFieldSection}, Rest}.
+
+%% Large ignored frames could lead to DoS. Users of
+%% this module must limit the size of such frames.
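+%% For example (hypothetical values), an unknown frame of type 33 that
+%% announces 50 bytes when only 10 have been received returns
+%%   cow_http3:parse(<<33, 0:2, 50:6, 0:10/unit:8>>)
+%%     -> {more, ignore, 40}
+%% so a caller can cap how many further bytes it is willing to skip.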
+parse_ignore(Data, Len) ->
+ case Data of
+ <<_:Len/binary, Rest/bits>> ->
+ {ignore, Rest};
+ _ ->
+ {more, ignore, Len - byte_size(Data)}
+ end.
+
+-spec parse_unidi_stream_header(binary())
+ -> {ok, control | push | encoder | decoder, binary()}
+ | {undefined, binary()}.
+
+parse_unidi_stream_header(<<0, Rest/bits>>) ->
+ {ok, control, Rest};
+parse_unidi_stream_header(<<1, Rest/bits>>) ->
+ {ok, push, Rest};
+parse_unidi_stream_header(<<2, Rest/bits>>) ->
+ {ok, encoder, Rest};
+parse_unidi_stream_header(<<3, Rest/bits>>) ->
+ {ok, decoder, Rest};
+parse_unidi_stream_header(<<0:2, _:6, Rest/bits>>) ->
+ {undefined, Rest};
+parse_unidi_stream_header(<<1:2, _:14, Rest/bits>>) ->
+ {undefined, Rest};
+parse_unidi_stream_header(<<2:2, _:30, Rest/bits>>) ->
+ {undefined, Rest};
+parse_unidi_stream_header(<<3:2, _:62, Rest/bits>>) ->
+ {undefined, Rest}.
+
+-spec code_to_error(non_neg_integer()) -> error().
+
+code_to_error(16#0100) -> h3_no_error;
+code_to_error(16#0101) -> h3_general_protocol_error;
+code_to_error(16#0102) -> h3_internal_error;
+code_to_error(16#0103) -> h3_stream_creation_error;
+code_to_error(16#0104) -> h3_closed_critical_stream;
+code_to_error(16#0105) -> h3_frame_unexpected;
+code_to_error(16#0106) -> h3_frame_error;
+code_to_error(16#0107) -> h3_excessive_load;
+code_to_error(16#0108) -> h3_id_error;
+code_to_error(16#0109) -> h3_settings_error;
+code_to_error(16#010a) -> h3_missing_settings;
+code_to_error(16#010b) -> h3_request_rejected;
+code_to_error(16#010c) -> h3_request_cancelled;
+code_to_error(16#010d) -> h3_request_incomplete;
+code_to_error(16#010e) -> h3_message_error;
+code_to_error(16#010f) -> h3_connect_error;
+code_to_error(16#0110) -> h3_version_fallback;
+%% Unknown/reserved error codes must be treated
+%% as equivalent to H3_NO_ERROR.
+code_to_error(_) -> h3_no_error.
+
+%% Building.
+
+-spec data(iodata()) -> iolist().
+
+data(Data) ->
+ Len = encode_int(iolist_size(Data)),
+ [<<0:8>>, Len, Data].
+
+-spec headers(iodata()) -> iolist().
+
+headers(HeaderBlock) ->
+ Len = encode_int(iolist_size(HeaderBlock)),
+ [<<1:8>>, Len, HeaderBlock].
+
+-spec settings(settings()) -> iolist().
+
+settings(Settings) when Settings =:= #{} ->
+ <<4:8, 0:8>>;
+settings(Settings) ->
+ Payload = settings_payload(Settings),
+ Len = encode_int(iolist_size(Payload)),
+ [<<4:8>>, Len, Payload].
+
+settings_payload(Settings) ->
+ Payload = [case Key of
+ %% SETTINGS_QPACK_MAX_TABLE_CAPACITY (RFC9204).
+ qpack_max_table_capacity when Value =:= 0 -> <<>>;
+ qpack_max_table_capacity -> [encode_int(1), encode_int(Value)];
+ %% SETTINGS_MAX_FIELD_SECTION_SIZE (RFC9114).
+ max_field_section_size when Value =:= infinity -> <<>>;
+ max_field_section_size -> [encode_int(6), encode_int(Value)];
+ %% SETTINGS_QPACK_BLOCKED_STREAMS (RFC9204).
+ qpack_blocked_streams when Value =:= 0 -> <<>>;
+ qpack_blocked_streams -> [encode_int(7), encode_int(Value)];
+ %% SETTINGS_ENABLE_CONNECT_PROTOCOL (RFC9220).
+ enable_connect_protocol when Value -> [encode_int(8), encode_int(1)];
+ enable_connect_protocol -> [encode_int(8), encode_int(0)]
+ end || {Key, Value} <- maps:to_list(Settings)],
+ %% Include one reserved identifier in addition.
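+ %% Reserved identifiers have the form 16#1f * N + 16#21 for N >= 0,
+ %% which is what the arithmetic below produces. (RFC9114 7.2.4.1)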
+ ReservedType = 16#1f * (rand:uniform(148764065110560900) - 1) + 16#21,
+ [encode_int(ReservedType), encode_int(rand:uniform(15384) - 1)|Payload].
+
+-spec error_to_code(error()) -> non_neg_integer().
+
+error_to_code(h3_no_error) ->
+ %% Implementations should select a reserved error code
+ %% with some probability when they would have sent H3_NO_ERROR. (RFC9114 8.1)
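+ %% Reserved error codes have the form 16#1f * N + 16#21 for N >= 0,
+ %% which is what the second branch below produces.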
+ case rand:uniform(2) of
+ 1 -> 16#0100;
+ 2 -> 16#1f * (rand:uniform(148764065110560900) - 1) + 16#21
+ end;
+error_to_code(h3_general_protocol_error) -> 16#0101;
+error_to_code(h3_internal_error) -> 16#0102;
+error_to_code(h3_stream_creation_error) -> 16#0103;
+error_to_code(h3_closed_critical_stream) -> 16#0104;
+error_to_code(h3_frame_unexpected) -> 16#0105;
+error_to_code(h3_frame_error) -> 16#0106;
+error_to_code(h3_excessive_load) -> 16#0107;
+error_to_code(h3_id_error) -> 16#0108;
+error_to_code(h3_settings_error) -> 16#0109;
+error_to_code(h3_missing_settings) -> 16#010a;
+error_to_code(h3_request_rejected) -> 16#010b;
+error_to_code(h3_request_cancelled) -> 16#010c;
+error_to_code(h3_request_incomplete) -> 16#010d;
+error_to_code(h3_message_error) -> 16#010e;
+error_to_code(h3_connect_error) -> 16#010f;
+error_to_code(h3_version_fallback) -> 16#0110.
+
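+%% QUIC variable-length integer encoding (RFC9000 16). For example,
+%% encode_int(0) = <<0:2, 0:6>> (one byte, 16#00) and
+%% encode_int(300) = <<1:2, 300:14>> (two bytes, 16#41 16#2c).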
+-spec encode_int(0..16#3fffffffffffffff) -> binary().
+
+encode_int(I) when I < 64 ->
+ <<0:2, I:6>>;
+encode_int(I) when I < 16384 ->
+ <<1:2, I:14>>;
+encode_int(I) when I < 1073741824 ->
+ <<2:2, I:30>>;
+encode_int(I) when I < 4611686018427387904 ->
+ <<3:2, I:62>>.
diff --git a/src/cow_http3_machine.erl b/src/cow_http3_machine.erl
new file mode 100644
index 0000000..b1b4a68
--- /dev/null
+++ b/src/cow_http3_machine.erl
@@ -0,0 +1,721 @@
+%% Copyright (c) 2023-2024, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http3_machine).
+
+-export([init/2]).
+-export([init_unidi_local_streams/4]).
+-export([init_unidi_stream/3]).
+-export([set_unidi_remote_stream_type/3]).
+-export([init_bidi_stream/2]).
+-export([init_bidi_stream/3]).
+-export([close_bidi_stream_for_sending/2]).
+-export([close_stream/2]).
+-export([unidi_data/4]).
+-export([frame/4]).
+-export([ignored_frame/2]).
+-export([prepare_headers/5]).
+-export([prepare_trailers/3]).
+-export([reset_stream/2]).
+-export([get_bidi_stream_local_state/2]).
+-export([get_bidi_stream_remote_state/2]).
+
+-type opts() :: #{
+ enable_connect_protocol => boolean(),
+ max_decode_blocked_streams => 0..16#3fffffffffffffff,
+ max_decode_table_size => 0..16#3fffffffffffffff,
+ max_encode_blocked_streams => 0..16#3fffffffffffffff,
+ max_encode_table_size => 0..16#3fffffffffffffff
+}.
+-export_type([opts/0]).
+
+-type unidi_stream_dir() :: unidi_local | unidi_remote.
+-type unidi_stream_type() :: control | push | encoder | decoder.
+
+-record(unidi_stream, {
+ id :: cow_http3:stream_id(),
+
+ %% Unidi stream direction (local = we initiated).
+ dir :: unidi_stream_dir(),
+
+ %% Unidi stream type.
+ type :: undefined | unidi_stream_type()
+}).
+
+-record(bidi_stream, {
+ id :: cow_http3:stream_id(),
+
+ %% Request method.
+ method = undefined :: undefined | binary(),
+
+ %% Whether we finished sending data.
+ local = idle :: idle | cow_http:fin(),
+
+ %% Whether we finished receiving data.
+ remote = idle :: idle | cow_http:fin(),
+
+ %% Size expected and read from the request body.
+ remote_expected_size = undefined :: undefined | non_neg_integer(),
+ remote_read_size = 0 :: non_neg_integer(),
+
+ %% Unparsed te header. Used to know if we can send trailers.
+ %% Note that we can always send trailers to the server.
+ te :: undefined | binary()
+}).
+
+-type stream() :: #unidi_stream{} | #bidi_stream{}.
+
+-record(http3_machine, {
+ %% Whether the HTTP/3 endpoint is a client or a server.
+ mode :: client | server,
+
+ %% Current state of the supported unidi streams:
+ %% * the control stream must send SETTINGS as its first frame
+ %% * none of these streams can be closed once they are open
+ peer_control_state = no_stream :: no_stream | no_settings | ready,
+ peer_decode_state = no_stream :: no_stream | ready,
+ peer_encode_state = no_stream :: no_stream | ready,
+
+ %% Maximum Push ID.
+ max_push_id = -1 :: -1 | cow_http3:push_id(),
+
+ %% Settings are separate for each endpoint. They are sent once
+ %% at the beginning of the control stream.
+ local_settings = #{
+% enable_connect_protocol => false
+% max_decode_blocked_streams => 0,
+% max_decode_table_size => 0,
+% max_encode_blocked_streams => 0,
+% max_encode_table_size => 4096
+ } :: map(),
+
+ %% Currently active HTTP/3 streams. Streams may be initiated either
+ %% by the client or by the server through PUSH_PROMISE frames.
+ streams = #{} :: #{cow_http3:stream_id() => stream()},
+
+ %% QPACK decoding and encoding state.
+ decode_state :: cow_qpack:state(),
+ encode_state :: cow_qpack:state()
+}).
+
+-opaque http3_machine() :: #http3_machine{}.
+-export_type([http3_machine/0]).
+
+-type instructions() :: undefined
+ | {decoder_instructions | encoder_instructions, iodata()}.
+
+-spec init(client | server, opts())
+ -> {ok, iolist(), http3_machine()}.
+
+init(Mode, Opts) ->
+ Settings = init_settings(Opts),
+ {ok, cow_http3:settings(Settings), #http3_machine{
+ mode=Mode, local_settings=Settings,
+ decode_state=init_decode_state(Opts),
+ encode_state=init_encode_state(Opts)
+ }}.
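+
+%% A minimal usage sketch (hypothetical variable names): a server would
+%% typically call
+%%   {ok, SettingsFrame, HTTP3Machine} = cow_http3_machine:init(server, #{}),
+%% and then send SettingsFrame at the start of its local control stream.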
+
+init_settings(Opts) ->
+ S0 = setting_from_opt(#{}, Opts, max_decode_table_size,
+ qpack_max_table_capacity, 0),
+ S1 = setting_from_opt(S0, Opts, max_decode_blocked_streams,
+ qpack_blocked_streams, 0),
+ %% @todo max_field_section_size
+ setting_from_opt(S1, Opts, enable_connect_protocol,
+ enable_connect_protocol, false).
+
+setting_from_opt(Settings, Opts, OptName, SettingName, Default) ->
+ case maps:get(OptName, Opts, Default) of
+ Default -> Settings;
+ Value -> Settings#{SettingName => Value}
+ end.
+
+%% Note that only the decoder's limits are the ones we advertise in SETTINGS.
+init_decode_state(Opts) ->
+ MaxTableCapacity = maps:get(max_decode_table_size, Opts, 0),
+ MaxBlockedStreams = maps:get(max_decode_blocked_streams, Opts, 0),
+ cow_qpack:init(decoder, MaxTableCapacity, MaxBlockedStreams).
+
+%% We want to use the dynamic table by default to improve the
+%% compression ratio, but we do not allow blocked streams by
+%% default because they can make latency worse than it would
+%% be without blocking.
+init_encode_state(Opts) ->
+ MaxTableCapacity = maps:get(max_encode_table_size, Opts, 4096),
+ MaxBlockedStreams = maps:get(max_encode_blocked_streams, Opts, 0),
+ cow_qpack:init(encoder, MaxTableCapacity, MaxBlockedStreams).
+
+-spec init_unidi_local_streams(cow_http3:stream_id(), cow_http3:stream_id(),
+ cow_http3:stream_id(), State) -> State when State::http3_machine().
+
+init_unidi_local_streams(ControlID, EncoderID, DecoderID,
+ State=#http3_machine{streams=Streams}) ->
+ State#http3_machine{
+ streams=Streams#{
+ ControlID => #unidi_stream{id=ControlID, dir=unidi_local, type=control},
+ EncoderID => #unidi_stream{id=EncoderID, dir=unidi_local, type=encoder},
+ DecoderID => #unidi_stream{id=DecoderID, dir=unidi_local, type=decoder}
+ }}.
+
+-spec init_unidi_stream(cow_http3:stream_id(), unidi_stream_dir(), State)
+ -> State when State::http3_machine().
+
+init_unidi_stream(StreamID, StreamDir, State=#http3_machine{streams=Streams}) ->
+ State#http3_machine{streams=Streams#{StreamID => #unidi_stream{
+ id=StreamID, dir=StreamDir, type=undefined}}}.
+
+-spec set_unidi_remote_stream_type(cow_http3:stream_id(), unidi_stream_type(), State)
+ -> {ok, State}
+ | {error, {connection_error, h3_stream_creation_error, atom()}, State}
+ when State::http3_machine().
+
+set_unidi_remote_stream_type(StreamID, Type=control,
+ State=#http3_machine{peer_control_state=no_stream}) ->
+ Stream = stream_get(StreamID, State),
+ {ok, stream_store(Stream#unidi_stream{type=Type},
+ State#http3_machine{peer_control_state=no_settings})};
+set_unidi_remote_stream_type(_, control, State) ->
+ {error, {connection_error, h3_stream_creation_error,
+ 'A peer cannot open two control streams. (RFC9114 6.2.1)'},
+ State};
+set_unidi_remote_stream_type(StreamID, Type=decoder,
+ State=#http3_machine{peer_decode_state=no_stream}) ->
+ Stream = stream_get(StreamID, State),
+ {ok, stream_store(Stream#unidi_stream{type=Type},
+ State#http3_machine{peer_decode_state=ready})};
+set_unidi_remote_stream_type(StreamID, Type=encoder,
+ State=#http3_machine{peer_encode_state=no_stream}) ->
+ Stream = stream_get(StreamID, State),
+ {ok, stream_store(Stream#unidi_stream{type=Type},
+ State#http3_machine{peer_encode_state=ready})};
+set_unidi_remote_stream_type(_, decoder, State) ->
+ {error, {connection_error, h3_stream_creation_error,
+ 'A peer cannot open two decoder streams. (RFC9204 4.2)'},
+ State};
+set_unidi_remote_stream_type(_, encoder, State) ->
+ {error, {connection_error, h3_stream_creation_error,
+ 'A peer cannot open two encoder streams. (RFC9204 4.2)'},
+ State}.
+
+%% All bidi streams are request/response.
+%% We only need to know the method when in client mode.
+
+-spec init_bidi_stream(cow_http3:stream_id(), State)
+ -> State when State::http3_machine().
+
+init_bidi_stream(StreamID, State=#http3_machine{streams=Streams}) ->
+ State#http3_machine{streams=Streams#{
+ StreamID => #bidi_stream{id=StreamID}
+ }}.
+
+-spec init_bidi_stream(cow_http3:stream_id(), binary(), State)
+ -> State when State::http3_machine().
+
+init_bidi_stream(StreamID, Method, State=#http3_machine{streams=Streams}) ->
+ State#http3_machine{streams=Streams#{
+ StreamID => #bidi_stream{id=StreamID, method=Method}
+ }}.
+
+-spec close_bidi_stream_for_sending(cow_http3:stream_id(), State)
+ -> State when State::http3_machine().
+
+close_bidi_stream_for_sending(StreamID, State=#http3_machine{streams=Streams}) ->
+ #{StreamID := Stream} = Streams,
+ stream_store(Stream#bidi_stream{local=fin}, State).
+
+-spec close_stream(cow_http3:stream_id(), State)
+ -> {ok, State}
+ | {error, {connection_error, h3_closed_critical_stream, atom()}, State}
+ when State::http3_machine().
+
+close_stream(StreamID, State=#http3_machine{streams=Streams0}) ->
+ case maps:take(StreamID, Streams0) of
+ {#unidi_stream{type=control}, Streams} ->
+ {error, {connection_error, h3_closed_critical_stream,
+ 'A control stream was closed. (RFC9114 6.2.1)'},
+ State#http3_machine{streams=Streams}};
+ {#unidi_stream{type=decoder}, Streams} ->
+ {error, {connection_error, h3_closed_critical_stream,
+ 'A decoder stream was closed. (RFC9204 4.2)'},
+ State#http3_machine{streams=Streams}};
+ {#unidi_stream{type=encoder}, Streams} ->
+ {error, {connection_error, h3_closed_critical_stream,
+ 'An encoder stream was closed. (RFC9204 4.2)'},
+ State#http3_machine{streams=Streams}};
+ {_, Streams} ->
+ {ok, State#http3_machine{streams=Streams}}
+ end.
+
+-spec unidi_data(binary(), cow_http:fin(), cow_http3:stream_id(), State)
+ -> {ok, instructions(), State}
+ | {error, {connection_error, cow_qpack:error(), atom()}, State}
+ when State::http3_machine().
+
+%% All currently supported unidi streams are critical.
+unidi_data(_, fin, _, State) ->
+ {error, {connection_error, h3_closed_critical_stream,
+ 'The FIN flag was set on an encoder or decoder stream. (RFC9204 4.2)'},
+ State};
+unidi_data(Data, nofin, StreamID, State=#http3_machine{
+ decode_state=DecState0, encode_state=EncState0}) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=decoder} ->
+ case cow_qpack:execute_decoder_instructions(Data, EncState0) of
+ {ok, EncState} ->
+ {ok, undefined, State#http3_machine{encode_state=EncState}};
+ Error = {connection_error, _, _} ->
+ {error, Error, State}
+ end;
+ #unidi_stream{type=encoder} ->
+ case cow_qpack:execute_encoder_instructions(Data, DecState0) of
+ {ok, <<>>, DecState} ->
+ {ok, undefined, State#http3_machine{decode_state=DecState}};
+ {ok, DecData, DecState} ->
+ {ok, {decoder_instructions, DecData},
+ State#http3_machine{decode_state=DecState}};
+ Error = {connection_error, _, _} ->
+ {error, Error, State}
+ end
+ end.
+
+-spec frame(cow_http3:frame(), cow_http:fin(), cow_http3:stream_id(), State)
+ -> {ok, State}
+ | {ok, {data, binary()}, State}
+ | {ok, {headers, cow_http:headers(), cow_http:pseudo_headers(),
+ non_neg_integer() | undefined}, instructions(), State}
+ | {ok, {trailers, cow_http:headers()}, instructions(), State}
+ | {ok, {goaway, cow_http3:stream_id() | cow_http3:push_id()}, State}
+ | {error, {stream_error, h3_message_error, atom()}, instructions(), State}
+ | {error, {connection_error, cow_http3:error() | cow_qpack:error(), atom()}, State}
+ when State::http3_machine().
+
+frame(Frame, IsFin, StreamID, State) ->
+ case element(1, Frame) of
+ data -> data_frame(Frame, IsFin, StreamID, State);
+ headers -> headers_frame(Frame, IsFin, StreamID, State);
+ cancel_push -> cancel_push_frame(Frame, IsFin, StreamID, State);
+ settings -> settings_frame(Frame, IsFin, StreamID, State);
+ push_promise -> push_promise_frame(Frame, IsFin, StreamID, State);
+ goaway -> goaway_frame(Frame, IsFin, StreamID, State);
+ max_push_id -> max_push_id_frame(Frame, IsFin, StreamID, State)
+ end.
+
+%% DATA frame.
+
+data_frame(Frame={data, Data}, IsFin, StreamID, State) ->
+ DataLen = byte_size(Data),
+ case stream_get(StreamID, State) of
+ Stream = #bidi_stream{remote=nofin} ->
+ data_frame(Frame, IsFin, Stream, State, DataLen);
+ #bidi_stream{remote=idle} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'DATA frame received before a HEADERS frame. (RFC9114 4.1)'},
+ State};
+ #bidi_stream{remote=fin} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'DATA frame received after trailer HEADERS frame. (RFC9114 4.1)'},
+ State};
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State)
+ end.
+
+data_frame(Frame, IsFin, Stream0=#bidi_stream{remote_read_size=StreamRead}, State0, DataLen) ->
+ Stream = Stream0#bidi_stream{remote=IsFin,
+ remote_read_size=StreamRead + DataLen},
+ State = stream_store(Stream, State0),
+ case is_body_size_valid(Stream) of
+ true ->
+ {ok, Frame, State}%;
+%% @todo Implement and update error type/message.
+% false ->
+% stream_reset(StreamID, State, protocol_error,
+% 'The total size of DATA frames is different than the content-length. (RFC7540 8.1.2.6)')
+ end.
+
+%% It's always valid when no content-length header was specified.
+is_body_size_valid(#bidi_stream{remote_expected_size=undefined}) ->
+ true;
+%% We didn't finish reading the body but the size is already larger than expected.
+is_body_size_valid(#bidi_stream{remote=nofin, remote_expected_size=Expected,
+ remote_read_size=Read}) when Read > Expected ->
+ false;
+is_body_size_valid(#bidi_stream{remote=nofin}) ->
+ true;
+is_body_size_valid(#bidi_stream{remote=fin, remote_expected_size=Expected,
+ remote_read_size=Expected}) ->
+ true;
+%% We finished reading the body and the size read is not the one expected.
+is_body_size_valid(_) ->
+ false.
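+%% For example, with remote_expected_size=10 the size is considered
+%% valid at remote_read_size=4 while remote=nofin, but becomes invalid
+%% once remote=fin unless remote_read_size reaches exactly 10.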
+
+%% HEADERS frame.
+
+headers_frame(Frame, IsFin, StreamID, State=#http3_machine{mode=Mode}) ->
+ case stream_get(StreamID, State) of
+ %% Headers.
+ Stream=#bidi_stream{remote=idle} ->
+ headers_decode(Frame, IsFin, Stream, State, case Mode of
+ server -> request;
+ client -> response
+ end);
+ %% Trailers.
+ Stream=#bidi_stream{remote=nofin} ->
+ headers_decode(Frame, IsFin, Stream, State, trailers);
+ %% Additional frame received after trailers.
+ #bidi_stream{remote=fin} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'HEADERS frame received after trailer HEADERS frame. (RFC9114 4.1)'},
+ State};
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State)
+ end.
+
+headers_decode({headers, EncodedFieldSection}, IsFin, Stream=#bidi_stream{id=StreamID},
+ State=#http3_machine{decode_state=DecodeState0}, Type) ->
+ try cow_qpack:decode_field_section(EncodedFieldSection, StreamID, DecodeState0) of
+ {ok, Headers, DecData, DecodeState} ->
+ headers_process(Stream,
+ State#http3_machine{decode_state=DecodeState}, IsFin, Type, DecData, Headers);
+ Error = {connection_error, _, _} ->
+ {error, Error, State}
+ catch _:_ ->
+ {error, {connection_error, qpack_decompression_failed,
+ 'Exception while trying to decode QPACK-encoded header block. (RFC9204 2.2)'},
+ State}
+ end.
+
+headers_process(Stream=#bidi_stream{method=ReqMethod},
+ State=#http3_machine{local_settings=LocalSettings},
+ IsFin, Type, DecData, Headers0) ->
+ case cow_http:process_headers(Headers0, Type, ReqMethod, IsFin, LocalSettings) of
+ {headers, Headers, PseudoHeaders, Len} ->
+ headers_frame(Stream, State, IsFin, Type, DecData, Headers, PseudoHeaders, Len);
+% {push_promise, Headers, PseudoHeaders} -> %% @todo Implement push promises.
+ {trailers, Headers} ->
+ trailers_frame(Stream, State, DecData, Headers);
+ {error, Reason} ->
+ {error, {stream_error, h3_message_error, format_error(Reason)},
+ %% We decoded the headers, so we must send the decoder instructions if any.
+ case DecData of
+ <<>> -> undefined;
+ _ -> {decoder_instructions, DecData}
+ end,
+ State}
+ end.
+
+headers_frame(Stream0, State0, IsFin, Type, DecData, Headers, PseudoHeaders, Len) ->
+ Stream = case Type of
+ request ->
+ TE = case lists:keyfind(<<"te">>, 1, Headers) of
+ {_, TE0} -> TE0;
+ false -> undefined
+ end,
+ Stream0#bidi_stream{method=maps:get(method, PseudoHeaders),
+ remote=IsFin, remote_expected_size=Len, te=TE};
+ response ->
+ case PseudoHeaders of
+ #{status := Status} when Status >= 100, Status =< 199 -> Stream0;
+ _ -> Stream0#bidi_stream{remote=IsFin, remote_expected_size=Len}
+ end
+ end,
+ State = stream_store(Stream, State0),
+ {ok, {headers, Headers, PseudoHeaders, Len},
+ case DecData of
+ <<>> -> undefined;
+ _ -> {decoder_instructions, DecData}
+ end,
+ State}.
+
+trailers_frame(Stream0, State0, DecData, Headers) ->
+ Stream = Stream0#bidi_stream{remote=fin},
+ State = stream_store(Stream, State0),
+ %% @todo Error out if we didn't get the full body.
+ case is_body_size_valid(Stream) of
+ true ->
+ {ok, {trailers, Headers},
+ case DecData of
+ <<>> -> undefined;
+ _ -> {decoder_instructions, DecData}
+ end,
+ State}%;
+%% @todo Implement and update error type/message.
+% false ->
+% stream_reset(StreamID, State, protocol_error,
+% 'The total size of DATA frames is different than the content-length. (RFC7540 8.1.2.6)')
+ end.
+
+format_error(connect_invalid_pseudo_header) ->
+ 'CONNECT requests only use the :method and :authority pseudo-headers. (RFC9114 4.4)';
+format_error(connect_missing_authority) ->
+ 'CONNECT requests must include the :authority pseudo-header. (RFC9114 4.4)';
+format_error(empty_header_name) ->
+ 'Empty header names are not valid regular headers. (CVE-2019-9516)';
+format_error(extended_connect_missing_protocol) ->
+ 'Extended CONNECT requests must include the :protocol pseudo-header. (RFC9220, RFC8441 4)';
+format_error(invalid_connection_header) ->
+ 'The connection header is not allowed. (RFC9114 4.2)';
+format_error(invalid_keep_alive_header) ->
+ 'The keep-alive header is not allowed. (RFC9114 4.2)';
+format_error(invalid_protocol_pseudo_header) ->
+ 'The :protocol pseudo-header is only defined for the extended CONNECT. (RFC9220, RFC8441 4)';
+format_error(invalid_proxy_authenticate_header) ->
+ 'The proxy-authenticate header is not allowed. (RFC9114 4.2)';
+format_error(invalid_proxy_authorization_header) ->
+ 'The proxy-authorization header is not allowed. (RFC9114 4.2)';
+format_error(invalid_pseudo_header) ->
+ 'An unknown or invalid pseudo-header was found. (RFC9114 4.3)';
+format_error(invalid_status_pseudo_header) ->
+ 'The :status pseudo-header value is invalid. (RFC9114 4.3, RFC9114 4.3.2)';
+format_error(invalid_te_header) ->
+ 'The te header is only allowed in request headers. (RFC9114 4.2)';
+format_error(invalid_te_value) ->
+ 'The te header with a value other than "trailers" is not allowed. (RFC9114 4.2)';
+format_error(invalid_transfer_encoding_header) ->
+ 'The transfer-encoding header is not allowed. (RFC9114 4.1)';
+format_error(invalid_upgrade_header) ->
+ 'The upgrade header is not allowed. (RFC9114 4.2)';
+format_error(missing_pseudo_header) ->
+ 'A required pseudo-header was not found. (RFC9114 4.3.1, RFC9114 4.3.2)';
+format_error(multiple_authority_pseudo_headers) ->
+ 'Multiple :authority pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_method_pseudo_headers) ->
+ 'Multiple :method pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_path_pseudo_headers) ->
+ 'Multiple :path pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_protocol_pseudo_headers) ->
+ 'Multiple :protocol pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_scheme_pseudo_headers) ->
+ 'Multiple :scheme pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_status_pseudo_headers) ->
+ 'Multiple :status pseudo-headers were found. (RFC9114 4.3.2)';
+format_error(non_zero_length_with_fin_flag) ->
+ 'HEADERS frame with the FIN flag contains a non-zero content-length. (RFC9114 4.1.2)';
+format_error(pseudo_header_after_regular) ->
+ 'Pseudo-headers were found after regular headers. (RFC9114 4.3)';
+format_error(trailer_invalid_pseudo_header) ->
+ 'Trailer header blocks must not contain pseudo-headers. (RFC9114 4.3)';
+format_error(uppercase_header_name) ->
+ 'Header names must be lowercase. (RFC9114 4.1.2, RFC9114 4.2)';
+format_error(Reason) ->
+ cow_http:format_semantic_error(Reason).
+
+cancel_push_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State)
+ end.
+
+settings_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State);
+ #bidi_stream{} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'The SETTINGS frame is not allowed on a bidi stream. (RFC9114 7.2.4)'},
+ State}
+ end.
+
+push_promise_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State)
+ end.
+
+goaway_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State);
+ #bidi_stream{} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'The GOAWAY frame is not allowed on a bidi stream. (RFC9114 7.2.6)'},
+ State}
+ end.
+
+max_push_id_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State);
+ #bidi_stream{} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'The MAX_PUSH_ID frame is not allowed on a bidi stream. (RFC9114 7.2.7)'},
+ State}
+ end.
+
+control_frame({settings, Settings}, State=#http3_machine{
+ peer_control_state=no_settings, encode_state=EncState0}) ->
+ %% @todo max_field_section_size
+ %% Send the QPACK values to the encoder.
+ MaxTableCapacity = maps:get(qpack_max_table_capacity, Settings, 0),
+ MaxBlockedStreams = maps:get(qpack_blocked_streams, Settings, 0),
+ EncState = cow_qpack:encoder_set_settings(MaxTableCapacity, MaxBlockedStreams, EncState0),
+ {ok, State#http3_machine{peer_control_state=ready, encode_state=EncState}};
+control_frame({settings, _}, State) ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'The SETTINGS frame cannot be sent more than once. (RFC9114 7.2.4)'},
+ State};
+control_frame(_Frame, State=#http3_machine{peer_control_state=no_settings}) ->
+ {error, {connection_error, h3_missing_settings,
+ 'The first frame on the control stream must be a SETTINGS frame. (RFC9114 6.2.1)'},
+ State};
+control_frame(Frame = {goaway, _}, State) ->
+ {ok, Frame, State};
+%% @todo Implement server push.
+control_frame({max_push_id, PushID}, State=#http3_machine{max_push_id=MaxPushID}) ->
+ if
+ PushID >= MaxPushID ->
+ {ok, State#http3_machine{max_push_id=PushID}};
+ true ->
+ {error, {connection_error, h3_id_error,
+ 'MAX_PUSH_ID must not be lower than previously received. (RFC9114 7.2.7)'},
+ State}
+ end;
+control_frame(ignored_frame, State) ->
+ {ok, State};
+control_frame(_Frame, State) ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'DATA and HEADERS frames are not allowed on the control stream. (RFC9114 7.2.1, RFC9114 7.2.2)'},
+ State}.
+
+%% Ignored frames.
+
+-spec ignored_frame(cow_http3:stream_id(), State)
+ -> {ok, State}
+ | {error, {connection_error, cow_http3:error(), atom()}, State}
+ when State::http3_machine().
+
+ignored_frame(StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(ignored_frame, State);
+ _ ->
+ {ok, State}
+ end.
+
+%% Functions for sending a message header or body. Note that
+%% this module does not send data directly; instead it returns
+%% a value that can then be used to send the frames.
+
+-spec prepare_headers(cow_http3:stream_id(), State,
+ idle | cow_http:fin(), cow_http:pseudo_headers(), cow_http:headers())
+ -> {ok, cow_http:fin(), iodata(), instructions(), State} when State::http3_machine().
+
+prepare_headers(StreamID, State=#http3_machine{encode_state=EncodeState0},
+ IsFin0, PseudoHeaders, Headers0) ->
+ Stream = #bidi_stream{method=Method, local=idle} = stream_get(StreamID, State),
+ IsFin = case {IsFin0, Method} of
+ {idle, _} -> nofin;
+ {_, <<"HEAD">>} -> fin;
+ _ -> IsFin0
+ end,
+ %% With QUIC we don't have a data queue so the local state
+ %% can be updated immediately.
+ LocalIsFin = case IsFin0 of
+ idle -> idle;
+ _ -> IsFin
+ end,
+ Headers = cow_http:merge_pseudo_headers(PseudoHeaders,
+ cow_http:remove_http1_headers(Headers0)),
+ {ok, HeaderBlock, EncData, EncodeState}
+ = cow_qpack:encode_field_section(Headers, StreamID, EncodeState0),
+ {ok, IsFin, HeaderBlock,
+ case EncData of
+ [] -> undefined;
+ _ -> {encoder_instructions, EncData}
+ end,
+ stream_store(Stream#bidi_stream{local=LocalIsFin},
+ State#http3_machine{encode_state=EncodeState})}.
+
+-spec prepare_trailers(cow_http3:stream_id(), State, cow_http:headers())
+ -> {trailers, iodata(), instructions(), State}
+ | {no_trailers, State}
+ when State::http3_machine().
+
+prepare_trailers(StreamID, State=#http3_machine{encode_state=EncodeState0}, Trailers) ->
+ Stream = #bidi_stream{local=nofin, te=TE0} = stream_get(StreamID, State),
+ TE = try cow_http_hd:parse_te(TE0) of
+ {trailers, []} -> trailers;
+ _ -> no_trailers
+ catch _:_ ->
+ %% If we can't parse the TE header, assume we can't send trailers.
+ no_trailers
+ end,
+ case TE of
+ trailers ->
+ {ok, HeaderBlock, EncData, EncodeState}
+ = cow_qpack:encode_field_section(Trailers, StreamID, EncodeState0),
+ {trailers, HeaderBlock,
+ case EncData of
+ [] -> undefined;
+ _ -> {encoder_instructions, EncData}
+ end,
+ stream_store(Stream#bidi_stream{local=fin},
+ State#http3_machine{encode_state=EncodeState})};
+ no_trailers ->
+ {no_trailers, stream_store(Stream#bidi_stream{local=fin}, State)}
+ end.
+
+%% Public interface to reset streams.
+
+-spec reset_stream(cow_http3:stream_id(), State)
+ -> {ok, State} | {error, not_found} when State::http3_machine().
+
+reset_stream(StreamID, State=#http3_machine{streams=Streams0}) ->
+ case maps:take(StreamID, Streams0) of
+ {_, Streams} ->
+ {ok, State#http3_machine{streams=Streams}};
+ error ->
+ {error, not_found}
+ end.
+
+%% Retrieve the local state for a bidi stream.
+
+-spec get_bidi_stream_local_state(cow_http3:stream_id(), http3_machine())
+ -> {ok, idle | cow_http:fin()} | {error, not_found}.
+
+get_bidi_stream_local_state(StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #bidi_stream{local=IsFin} ->
+ {ok, IsFin};
+ %% Stream may never have been opened, or could have
+ %% already been closed.
+ undefined ->
+ {error, not_found}
+ end.
+
+%% Retrieve the remote state for a bidi stream.
+
+-spec get_bidi_stream_remote_state(cow_http3:stream_id(), http3_machine())
+ -> {ok, idle | cow_http:fin()} | {error, not_found}.
+
+get_bidi_stream_remote_state(StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #bidi_stream{remote=IsFin} ->
+ {ok, IsFin};
+ %% Stream may never have been opened, or could have
+ %% already been closed.
+ undefined ->
+ {error, not_found}
+ end.
+
+%% Stream-related functions.
+
+stream_get(StreamID, #http3_machine{streams=Streams}) ->
+ maps:get(StreamID, Streams, undefined).
+
+stream_store(Stream, State=#http3_machine{streams=Streams}) ->
+ StreamID = case Stream of
+ #bidi_stream{id=StreamID0} -> StreamID0;
+ #unidi_stream{id=StreamID0} -> StreamID0
+ end,
+ State#http3_machine{streams=Streams#{StreamID => Stream}}.
diff --git a/src/cow_qpack.erl b/src/cow_qpack.erl
new file mode 100644
index 0000000..027e29c
--- /dev/null
+++ b/src/cow_qpack.erl
@@ -0,0 +1,1581 @@
+%% Copyright (c) 2020-2024, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_qpack).
+-dialyzer(no_improper_lists).
+
+-export([init/1]).
+-export([init/3]).
+
+-export([decode_field_section/3]).
+-export([execute_encoder_instructions/2]).
+-export([decoder_cancel_stream/1]). %% @todo Use it.
+
+-export([encode_field_section/3]).
+-export([encode_field_section/4]).
+-export([execute_decoder_instructions/2]).
+-export([encoder_set_settings/3]).
+
+-record(state, {
+ %% Configuration.
+ %%
+	%% For the encoder these values will be set to the
+	%% lower of the configured value and the SETTINGS value.
+
+ %% Whether the configured values can be used. The
+ %% decoder can always use the configured values.
+ %% The encoder must wait for the SETTINGS frame.
+ settings_received :: boolean(),
+
+ %% Maximum size of the table.
+ max_table_capacity = 0 :: non_neg_integer(),
+
+ %% Maximum number of potentially blocked streams.
+ max_blocked_streams = 0 :: non_neg_integer(),
+
+ %% Dynamic table.
+
+ %% The current max table capacity after the encoder
+ %% sent an instruction to change the capacity.
+ capacity = 0 :: non_neg_integer(),
+
+ %% The size of each entry is len(Name) + len(Value) + 32.
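+	%% For example the entry {<<":authority">>, <<"www.example.com">>}
+	%% occupies 10 + 15 + 32 = 57 bytes.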
+ size = 0 :: non_neg_integer(),
+
+ %% The number of entries ever inserted in the dynamic table.
+ %% This value is used on the decoder's size to know whether
+	%% This value is used on the decoder's side to know whether
+ %% entries in the dynamic table.
+ insert_count = 0 :: non_neg_integer(),
+
+ %% The dynamic table. The first value is the size of the entry
+ %% and the second value the entry (Name, Value tuple). The
+ %% order of the entries is from newest to oldest.
+ %%
+ %% If 4 entries were inserted, the index of each entry would
+ %% be [3, 2, 1, 0]. If 4 entries were inserted and 1 of them
+ %% was later dropped, the index of each entry remaining would
+ %% be [3, 2, 1] and the insert_count value would be 3, allowing
+ %% us to know what index the newest entry is using.
+ dyn_table = [] :: [{pos_integer(), {binary(), binary()}}],
+
+ %% Decoder-specific state.
+
+ %% We keep track of streams that are currently blocked
+ %% in a map for easy counting and removal. A stream may
+ %% be blocked at the beginning of the decoding process.
+ %% A stream may be unblocked after encoder instructions
+ %% have been processed.
+ blocked_streams = #{} :: #{cow_http3:stream_id() => true},
+
+ %% Encoder-specific state.
+
+ %% We keep track of the known received count of the
+ %% decoder (the insert_count it has that we know of)
+ %% so that we know when we can evict an entry we
+ %% inserted. We cannot evict an entry before it has
+ %% been acknowledged. The known received count can
+ %% also be used to avoid blocking.
+ known_received_count = 0 :: non_neg_integer(),
+
+ %% We keep track of the streams that have used the
+ %% dynamic table in order to increase the known
+ %% received count when the decoder acks a stream.
+ %% We only keep the insert_count value for a stream's
+ %% field section.
+ %%
+ %% Because a stream can send multiple field sections
+ %% (informational response, final response, trailers),
+ %% we use a list to keep track of the different sections.
+	%% A FIFO structure would be more appropriate but we do
+ %% not expect a lot of field sections per stream.
+ references = #{} :: #{cow_http3:stream_id() => [non_neg_integer()]},
+
+ %% Smallest absolute index the encoder will reference.
+ %% Indexes below may exist in the dynamic table but are
+ %% in the process of being phased out and will eventually
+ %% be evicted. Only duplicating these indexes is allowed.
+ draining_index = 0 :: non_neg_integer(),
+
+ %% Size of the dynamic table that is available for
+ %% eviction during encoding. Both this value and the
+ %% draining_index are computed at the start of encoding.
+	%% Note that for the encoder this cannot become negative,
+	%% but it might for the decoder.
+ draining_size = 0 :: integer()
+}).
+
+-opaque state() :: #state{}.
+-export_type([state/0]).
+
+-type error() :: qpack_decompression_failed
+ | qpack_encoder_stream_error
+ | qpack_decoder_stream_error.
+-export_type([error/0]).
+
+-type encoder_opts() :: #{
+ huffman => boolean()
+}.
+-export_type([encoder_opts/0]).
+
+%-ifdef(TEST).
+%-include_lib("proper/include/proper.hrl").
+%-endif.
+
+-include("cow_hpack_common.hrl").
+
+%% State initialization.
+
+-spec init(decoder | encoder) -> state().
+
+init(Role) ->
+ init(Role, 4096, 0).
+
+-spec init(decoder | encoder, non_neg_integer(), non_neg_integer()) -> state().
+
+init(Role, MaxTableCapacity, MaxBlockedStreams) ->
+ #state{
+ settings_received=Role =:= decoder,
+ max_table_capacity=MaxTableCapacity,
+ max_blocked_streams=MaxBlockedStreams
+ }.
+
+%% Decoding.
+
+-spec decode_field_section(binary(), cow_http3:stream_id(), State)
+ -> {ok, cow_http:headers(), binary(), State}
+ | {blocked, State}
+ | {connection_error, error(), atom()}
+ when State::state().
+
+decode_field_section(Data, StreamID, State=#state{max_blocked_streams=MaxBlockedStreams,
+ insert_count=InsertCount, blocked_streams=BlockedStreams}) ->
+ {EncInsertCount, Rest} = dec_big_int(Data, 0, 0),
+ ReqInsertCount = decode_req_insert_count(EncInsertCount, State),
+ if
+ ReqInsertCount =< InsertCount ->
+ decode_field_section(Rest, StreamID, State, ReqInsertCount);
+ %% The stream is blocked and we do not allow that;
+ %% or there are already too many blocked streams.
+ map_size(BlockedStreams) > MaxBlockedStreams ->
+ {connection_error, qpack_decompression_failed,
+ 'More blocked streams than configuration allows. (RFC9204 2.1.2)'};
+ %% The stream is blocked and we allow that.
+ %% The caller must keep the data and retry after
+ %% calling the execute_encoder_instructions function.
+ true ->
+ {blocked, State#state{blocked_streams=BlockedStreams#{StreamID => true}}}
+ end.
+
+decode_field_section(<<S:1,Rest0/bits>>, StreamID,
+ State0=#state{blocked_streams=BlockedStreams}, ReqInsertCount) ->
+ State1 = State0#state{
+ %% The stream may have been blocked. Unblock it.
+ blocked_streams=maps:remove(StreamID, BlockedStreams),
+ %% Reset the draining_size. We don't use it, but don't
+ %% want the value to unnecessarily become a big int.
+ draining_size=0
+ },
+ {DeltaBase, Rest} = dec_int7(Rest0),
+ Base = case S of
+ 0 -> ReqInsertCount + DeltaBase;
+ 1 -> ReqInsertCount - DeltaBase - 1
+ end,
+ case decode(Rest, State1, Base, []) of
+ {ok, Headers, State} when ReqInsertCount =:= 0 ->
+ {ok, Headers, <<>>, State};
+ {ok, Headers, State} ->
+ {ok, Headers, enc_int7(StreamID, 2#1), State}
+ end.
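+%% For example, the Appendix B field section <<16#0381:16, 16#10, 16#11>>
+%% decoded in the test below carries EncInsertCount=3 followed by S=1 and
+%% DeltaBase=1, giving Base = ReqInsertCount - DeltaBase - 1 = 2 - 1 - 1 = 0.
+%% The two post-base indexed lines 16#10 and 16#11 then reference the first
+%% two entries inserted into the dynamic table.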
+
+decode_req_insert_count(0, _) ->
+ 0;
+decode_req_insert_count(EncInsertCount, #state{
+ max_table_capacity=MaxTableCapacity, insert_count=InsertCount}) ->
+ MaxEntries = MaxTableCapacity div 32,
+ FullRange = 2 * MaxEntries,
+ if
+ EncInsertCount > FullRange ->
+ {connection_error, qpack_decompression_failed,
+ 'EncInsertCount larger than maximum possible value. (RFC9204 4.5.1.1)'};
+ true ->
+ MaxValue = InsertCount + MaxEntries,
+ MaxWrapped = (MaxValue div FullRange) * FullRange,
+ ReqInsertCount = MaxWrapped + EncInsertCount - 1,
+ if
+ ReqInsertCount > MaxValue ->
+ if
+ ReqInsertCount =< FullRange ->
+ {connection_error, qpack_decompression_failed,
+ 'ReqInsertCount value larger than current maximum value. (RFC9204 4.5.1.1)'};
+ true ->
+ ReqInsertCount - FullRange
+ end;
+ ReqInsertCount =:= 0 ->
+ {connection_error, qpack_decompression_failed,
+ 'ReqInsertCount value of 0 must be encoded as 0. (RFC9204 4.5.1.1)'};
+ true ->
+ ReqInsertCount
+ end
+ end.
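+%% Continuing the example above: with a 4096-byte table, MaxEntries is 128
+%% and FullRange is 256. For EncInsertCount=3 and InsertCount=2 we get
+%% MaxValue=130, MaxWrapped=0 and therefore ReqInsertCount = 0 + 3 - 1 = 2.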
+
+decode(<<>>, State, _, Acc) ->
+ {ok, lists:reverse(Acc), State};
+%% Indexed field line.
+decode(<<2#1:1,T:1,Rest0/bits>>, State, Base, Acc) ->
+ {Index, Rest} = dec_int6(Rest0),
+ Entry = case T of
+ 0 -> table_get_dyn_pre_base(Index, Base, State);
+ 1 -> table_get_static(Index)
+ end,
+ decode(Rest, State, Base, [Entry|Acc]);
+%% Indexed field line with post-base index.
+decode(<<2#0001:4,Rest0/bits>>, State, Base, Acc) ->
+ {Index, Rest} = dec_int4(Rest0),
+ Entry = table_get_dyn_post_base(Index, Base, State),
+ decode(Rest, State, Base, [Entry|Acc]);
+%% Literal field line with name reference.
+decode(<<2#01:2,_N:1,T:1,Rest0/bits>>, State, Base, Acc) ->
+ %% @todo N=1 the encoded field line MUST be encoded as literal, need to return metadata about this?
+ {NameIndex, <<H:1,Rest1/bits>>} = dec_int4(Rest0),
+ Name = case T of
+ 0 -> table_get_name_dyn_rel(NameIndex, State);
+ 1 -> table_get_name_static(NameIndex)
+ end,
+ {ValueLen, Rest2} = dec_int7(Rest1),
+ {Value, Rest} = maybe_dec_huffman(Rest2, ValueLen, H),
+ decode(Rest, State, Base, [{Name, Value}|Acc]);
+%% Literal field line with post-base name reference.
+decode(<<2#0000:4,_N:1,Rest0/bits>>, State, Base, Acc) ->
+ %% @todo N=1 the encoded field line MUST be encoded as literal, need to return metadata about this?
+ {NameIndex, <<H:1,Rest1/bits>>} = dec_int3(Rest0),
+ Name = table_get_name_dyn_post_base(NameIndex, Base, State),
+ {ValueLen, Rest2} = dec_int7(Rest1),
+ {Value, Rest} = maybe_dec_huffman(Rest2, ValueLen, H),
+ decode(Rest, State, Base, [{Name, Value}|Acc]);
+%% Literal field line with literal name.
+decode(<<2#001:3,_N:1,NameH:1,Rest0/bits>>, State, Base, Acc) ->
+ %% @todo N=1 the encoded field line MUST be encoded as literal, need to return metadata about this?
+ {NameLen, Rest1} = dec_int3(Rest0),
+ <<NameStr:NameLen/binary,ValueH:1,Rest2/bits>> = Rest1,
+ {Name, <<>>} = maybe_dec_huffman(NameStr, NameLen, NameH),
+ {ValueLen, Rest3} = dec_int7(Rest2),
+ {Value, Rest} = maybe_dec_huffman(Rest3, ValueLen, ValueH),
+ decode(Rest, State, Base, [{Name, Value}|Acc]).
+
+-spec execute_encoder_instructions(binary(), State)
+ -> {ok, binary(), State}
+ | {connection_error, qpack_encoder_stream_error, atom()}
+ when State::state().
+
+execute_encoder_instructions(Data, State) ->
+ execute_encoder_instructions(Data, State, 0).
+
+execute_encoder_instructions(<<>>, State, 0) ->
+ {ok, <<>>, State};
+execute_encoder_instructions(<<>>, State, Increment) ->
+ {ok, enc_int6(Increment, 2#00), State};
+%% Set dynamic table capacity.
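+%% For example, the Appendix B encoder stream below begins with 16#3fbd01:
+%% the 2#001 prefix is followed by the 5-bit prefixed integer
+%% 31 + 61 + 1 * 128 = 220, setting the dynamic table capacity to 220 bytes.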
+execute_encoder_instructions(<<2#001:3,Rest0/bits>>, State=#state{
+ max_table_capacity=MaxTableCapacity, capacity=Capacity0,
+ dyn_table=DynamicTable0}, Increment) ->
+ {Capacity, Rest} = dec_int5(Rest0),
+ if
+ %% Capacity larger than configured, or dynamic table
+ %% disabled when max_table_capacity=0.
+ Capacity > MaxTableCapacity ->
+ {connection_error, qpack_encoder_stream_error,
+ 'New table capacity higher than SETTINGS_QPACK_MAX_TABLE_CAPACITY. (RFC9204 3.2.3, RFC9204 4.3.1)'};
+ %% Table capacity was reduced. We must evict entries.
+ Capacity < Capacity0 ->
+ {DynamicTable, Size} = table_evict(DynamicTable0, Capacity, 0, []),
+ execute_encoder_instructions(Rest, State#state{capacity=Capacity,
+ size=Size, dyn_table=DynamicTable}, Increment);
+ %% Table capacity equal or higher than previous.
+ true ->
+ execute_encoder_instructions(Rest,
+ State#state{capacity=Capacity}, Increment)
+ end;
+%% Insert with name reference.
+execute_encoder_instructions(<<2#1:1,T:1,Rest0/bits>>, State, Increment) ->
+ {NameIndex, <<H:1,Rest1/bits>>} = dec_int6(Rest0),
+ Name = case T of
+ 0 -> table_get_name_dyn_rel(NameIndex, State);
+ 1 -> table_get_name_static(NameIndex)
+ end,
+ {ValueLen, Rest2} = dec_int7(Rest1),
+ {Value, Rest} = maybe_dec_huffman(Rest2, ValueLen, H),
+ execute_insert_instruction(Rest, State, Increment, {Name, Value});
+%% Insert with literal name.
+execute_encoder_instructions(<<2#01:2,NameH:1,Rest0/bits>>, State, Increment) ->
+ {NameLen, Rest1} = dec_int5(Rest0),
+ {Name, <<ValueH:1,Rest2/bits>>} = maybe_dec_huffman(Rest1, NameLen, NameH),
+ {ValueLen, Rest3} = dec_int7(Rest2),
+ {Value, Rest} = maybe_dec_huffman(Rest3, ValueLen, ValueH),
+ execute_insert_instruction(Rest, State, Increment, {Name, Value});
+%% Duplicate.
+execute_encoder_instructions(<<2#000:3,Rest0/bits>>, State, Increment) ->
+ {Index, Rest} = dec_int5(Rest0),
+ Entry = table_get_dyn_rel(Index, State),
+ execute_insert_instruction(Rest, State, Increment, Entry).
+
+execute_insert_instruction(Rest, State0, Increment, Entry) ->
+ case table_insert(Entry, State0) of
+ {ok, State} ->
+ execute_encoder_instructions(Rest, State, Increment + 1);
+ Error = {connection_error, _, _} ->
+ Error
+ end.
+
+%% @todo Export / spec.
+
+decoder_cancel_stream(StreamID) ->
+ enc_int6(StreamID, 2#01).
+
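+%% Prefixed integers follow the HPACK scheme (RFC 7541 5.1): a value that
+%% fits in the N-bit prefix is encoded directly; otherwise the prefix is set
+%% to all ones and the remainder is encoded in 7-bit groups, least
+%% significant group first, with the high bit of each octet acting as a
+%% continuation flag.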
+dec_int3(<<2#111:3,Rest/bits>>) ->
+ dec_big_int(Rest, 7, 0);
+dec_int3(<<Int:3,Rest/bits>>) ->
+ {Int, Rest}.
+
+dec_int4(<<2#1111:4,Rest/bits>>) ->
+ dec_big_int(Rest, 15, 0);
+dec_int4(<<Int:4,Rest/bits>>) ->
+ {Int, Rest}.
+
+dec_int6(<<2#111111:6,Rest/bits>>) ->
+ dec_big_int(Rest, 63, 0);
+dec_int6(<<Int:6,Rest/bits>>) ->
+ {Int, Rest}.
+
+dec_int7(<<2#1111111:7,Rest/bits>>) ->
+ dec_big_int(Rest, 127, 0);
+dec_int7(<<Int:7,Rest/bits>>) ->
+ {Int, Rest}.
+
+maybe_dec_huffman(Data, ValueLen, 0) ->
+ <<Value:ValueLen/binary,Rest/bits>> = Data,
+ {Value, Rest};
+maybe_dec_huffman(Data, ValueLen, 1) ->
+ dec_huffman(Data, ValueLen, 0, <<>>).
+
+-ifdef(TEST).
+appendix_b_decoder_test() ->
+ %% Stream: 0
+ {ok, [
+ {<<":path">>, <<"/index.html">>}
+ ], <<>>, DecState0} = decode_field_section(<<
+ 16#0000:16,
+ 16#510b:16, 16#2f69:16, 16#6e64:16, 16#6578:16,
+ 16#2e68:16, 16#746d:16, 16#6c
+ >>, 0, init(decoder, 4096, 0)),
+ #state{
+ capacity=0,
+ size=0,
+ insert_count=0,
+ dyn_table=[]
+ } = DecState0,
+ %% Stream: Encoder
+ {ok, EncData1, DecState1} = execute_encoder_instructions(<<
+ 16#3fbd01:24,
+ 16#c00f:16, 16#7777:16, 16#772e:16, 16#6578:16,
+ 16#616d:16, 16#706c:16, 16#652e:16, 16#636f:16,
+ 16#6d,
+ 16#c10c:16, 16#2f73:16, 16#616d:16, 16#706c:16,
+ 16#652f:16, 16#7061:16, 16#7468:16
+ >>, DecState0),
+ <<2#00:2,2:6>> = EncData1,
+ #state{
+ capacity=220,
+ size=106,
+ insert_count=2,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = DecState1,
+ %% Stream: 4
+ {ok, [
+ {<<":authority">>, <<"www.example.com">>},
+ {<<":path">>, <<"/sample/path">>}
+ ], <<16#84>>, DecState2} = decode_field_section(<<
+ 16#0381:16,
+ 16#10,
+ 16#11
+ >>, 4, DecState1),
+ DecState1 = DecState2,
+ %% Stream: Encoder
+ {ok, EncData3, DecState3} = execute_encoder_instructions(<<
+ 16#4a63:16, 16#7573:16, 16#746f:16, 16#6d2d:16,
+ 16#6b65:16, 16#790c:16, 16#6375:16, 16#7374:16,
+ 16#6f6d:16, 16#2d76:16, 16#616c:16, 16#7565:16
+ >>, DecState2),
+ <<2#00:2,1:6>> = EncData3,
+ #state{
+ capacity=220,
+ size=160,
+ insert_count=3,
+ dyn_table=[
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = DecState3,
+ %% Stream: Encoder
+ {ok, EncData4, DecState4} = execute_encoder_instructions(<<
+ 16#02
+ >>, DecState3),
+ <<2#00:2,1:6>> = EncData4,
+ #state{
+ capacity=220,
+ size=217,
+ insert_count=4,
+ dyn_table=[
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = DecState4,
+ %% Stream: 8
+ %%
+ %% Note that this one is not really received by the decoder
+ %% so we will ignore the decoder state before we continue.
+ {ok, [
+ {<<":authority">>, <<"www.example.com">>},
+ {<<":path">>, <<"/">>},
+ {<<"custom-key">>, <<"custom-value">>}
+ ], <<16#88>>, IgnoredDecState} = decode_field_section(<<
+ 16#0500:16,
+ 16#80,
+ 16#c1,
+ 16#81
+ >>, 8, DecState4),
+ %% Note that the state did not change anyway.
+ DecState4 = IgnoredDecState,
+ %% Stream: Decoder - Stream Cancellation (Stream=8)
+ <<16#48>> = decoder_cancel_stream(8),
+ {ok, EncData5, DecState5} = execute_encoder_instructions(<<
+ 16#810d:16, 16#6375:16, 16#7374:16, 16#6f6d:16,
+ 16#2d76:16, 16#616c:16, 16#7565:16, 16#32
+ >>, DecState4),
+ <<2#00:2,1:6>> = EncData5,
+ #state{
+ capacity=220,
+ size=215,
+ insert_count=5,
+ dyn_table=[
+ {55, {<<"custom-key">>, <<"custom-value2">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}}
+ ]
+ } = DecState5,
+ ok.
+-endif.
+
+%% Encoding.
+
+-spec encode_field_section(cow_http:headers(), cow_http3:stream_id(), State)
+ -> {ok, iolist(), iolist(), State} when State::state().
+
+%% @todo Would be good to know encoder stream flow control to avoid writing there. Opts?
+encode_field_section(Headers, StreamID, State0) ->
+ encode_field_section(Headers, StreamID, State0, #{}).
+
+-spec encode_field_section(cow_http:headers(), cow_http3:stream_id(), State, encoder_opts())
+ -> {ok, iolist(), iolist(), State} when State::state().
+
+encode_field_section(Headers, StreamID, State0=#state{
+ max_table_capacity=MaxTableCapacity, insert_count=InsertCount,
+ references=Refs0}, Opts) ->
+ State1 = encode_update_drain_info(State0),
+ Base = InsertCount + 1,
+ {ReqInsertCount, EncData, Data, State} = encode(
+ Headers, StreamID, State1,
+ huffman_opt(Opts), 0, Base, [], []),
+ case ReqInsertCount of
+ 0 ->
+ {ok, [<<0:16>>|Data], EncData, State};
+ _ ->
+ MaxEntries = MaxTableCapacity div 32,
+ EncInsertCount = (ReqInsertCount rem (2 * MaxEntries)) + 1,
+ {S, DeltaBase} = if
+ %% We inserted new entries.
+ ReqInsertCount > Base ->
+ {2#1, ReqInsertCount - Base};
+ %% We only used existing entries.
+ ReqInsertCount =< Base ->
+ {2#0, ReqInsertCount - Base}
+ end,
+ %% Save the reference to avoid draining entries too quickly.
+ Refs = case Refs0 of
+ #{StreamID := ICs} ->
+ Refs0#{StreamID => [ReqInsertCount|ICs]};
+ _ ->
+ Refs0#{StreamID => [ReqInsertCount]}
+ end,
+ {ok, [enc_big_int(EncInsertCount, <<>>), enc_int7(DeltaBase, S)|Data], EncData,
+ State#state{references=Refs}}
+ end.
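+%% In the Appendix B encoder test below, the field section for stream 4 is
+%% encoded with MaxEntries = 220 div 32 = 6, ReqInsertCount=2 and Base=1,
+%% giving EncInsertCount = (2 rem 12) + 1 = 3 and {S, DeltaBase} = {2#1, 1},
+%% hence the 16#0381 field section prefix.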
+
+%% We check how many entries we can evict. The result
+%% will take the form of a draining_index (the oldest
+%% entry the encoder can reference) as well as a
+%% draining_size (how much data can be gained by evicting).
+%%
+%% We first look at streams that have not been acknowledged
+%% and find the smallest insert_count value from them. We
+%% cannot evict any entry that is newer than or equal to
+%% that value.
+%%
+%% Then we also need to make sure we don't evict too much
+%% from the table.
+%%
+%% Finally we go over the dynamic table to count how much
+%% we can actually drain and what the draining index really is.
+encode_update_drain_info(State=#state{max_table_capacity=MaxCapacity,
+ insert_count=InsertCount, dyn_table=DynTable, references=Refs}) ->
+ PendingInsertCount = if
+ %% When we don't use the dynamic table, or we didn't insert
+ %% anything yet, there are no references. We can drain
+ %% everything but are still constrained by the max draining size.
+ Refs =:= #{} ->
+ InsertCount;
+ true ->
+ maps:fold(fun(_, ICs, V) ->
+ IC = hd(lists:reverse(ICs)),
+ case V of
+ undefined -> IC;
+ _ -> min(IC, V)
+ end
+ end, undefined, Refs)
+ end,
+ %% We use a simple formula for calculating the maximum
+ %% draining size, found in nginx: we allow evicting
+	%% between 1/8th of the maximum table capacity and
+ %% 512 bytes, whichever is smaller. When the maximum
+ %% table capacity is small this formula may get us
+ %% a value that's too small to drain anything, so
+ %% we use 64 bytes as a minimum.
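+	%% For example, with the 220-byte table used in the tests below,
+	%% 220 div 8 = 27, so the 64-byte floor applies.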
+ MaxDrainingSize0 = min(512, MaxCapacity div 8),
+ MaxDrainingSize = if
+ MaxDrainingSize0 < 64 -> 64;
+ true -> MaxDrainingSize0
+ end,
+ {DrainingIndex, DrainingSize} =
+ encode_update_drain_loop(lists:reverse(DynTable),
+ InsertCount - length(DynTable), PendingInsertCount,
+ 0, MaxDrainingSize),
+ State#state{
+ draining_index=DrainingIndex,
+ draining_size=DrainingSize
+ }.
+
+%% We go over the dynamic table in reverse order. We stop
+%% when we either reach the PendingInsertCount value or get
+%% above MaxDrainingSize. It's not possible to go over the
+%% entire dynamic table because we have references.
+encode_update_drain_loop(_, Index, PendingIndex, Size, _)
+ when Index =:= PendingIndex ->
+ {Index, Size};
+encode_update_drain_loop([{EntrySize, _}|_], Index, _, Size, MaxSize)
+ when Size + EntrySize > MaxSize ->
+ {Index, Size};
+encode_update_drain_loop([{EntrySize, _}|Tail], Index, PendingIndex, Size, MaxSize) ->
+ encode_update_drain_loop(Tail, Index + 1, PendingIndex, Size + EntrySize, MaxSize).
+
+encode([], _StreamID, State, _HuffmanOpt,
+ ReqInsertCount, _Base, EncAcc, Acc) ->
+ {ReqInsertCount, lists:reverse(EncAcc), lists:reverse(Acc), State};
+encode([{Name, Value0}|Tail], StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc, Acc) ->
+ %% We conditionally call iolist_to_binary/1 because a small
+ %% but noticeable speed improvement happens when we do this.
+ %% (Or at least it did for cow_hpack.)
+ Value = if
+ is_binary(Value0) -> Value0;
+ true -> iolist_to_binary(Value0)
+ end,
+ Entry = {Name, Value},
+ encode_static([Entry|Tail], StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc, Acc).
+
+encode_static([Entry|Tail], StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc, Acc) ->
+ case table_find_static(Entry) of
+ not_found ->
+ encode_dyn([Entry|Tail], StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc, Acc);
+ StaticIndex ->
+ encode(Tail, StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc,
+ %% Indexed Field Line. T=1 (static).
+ [enc_int6(StaticIndex, 2#11)|Acc])
+ end.
+
+encode_dyn([Entry|Tail], StreamID, State0=#state{draining_index=DrainingIndex},
+ HuffmanOpt, ReqInsertCount0, Base, EncAcc, Acc) ->
+ case table_find_dyn(Entry, State0) of
+ not_found ->
+ encode_static_name([Entry|Tail], StreamID, State0, HuffmanOpt,
+ ReqInsertCount0, Base, EncAcc, Acc);
+ %% When the index is below the drain index and there is enough
+ %% space in the table for duplicating the value, we do that
+ %% and use the duplicated index. If we can't then we must not
+ %% use the dynamic index for the field.
+ DynIndex when DynIndex < DrainingIndex ->
+ case encode_can_insert(Entry, State0) of
+ {true, EncInstr, State1} ->
+ {ok, State} = table_insert(Entry, State1),
+ #state{insert_count=ReqInsertCount} = State,
+ %% We must reference the relative index of the entry we duplicated
+ %% before we duplicated it. The newest entry starts at 0. If we
+ %% have 3 entries in the table, the oldest one will have a relative
+ %% index of 2. Because we already inserted the duplicate, our
+ %% ReqInsertCount has 1 added, so for our previously 3 entries
+ %% table, we end up with a ReqInsertCount of 4. This means we
+ %% have to remove 2 from the difference to find the relative index.
+ DynIndexRel = ReqInsertCount - DynIndex - 2,
+ encode(Tail, StreamID, State, HuffmanOpt, ReqInsertCount, Base,
+ %% Duplicate.
+ [[EncInstr|enc_int5(DynIndexRel, 2#000)]|EncAcc],
+ %% Indexed Field Line. T=0 (dynamic).
+ [enc_int6(Base - ReqInsertCount, 2#10)|Acc]);
+ false ->
+ encode_static_name([Entry|Tail], StreamID, State0, HuffmanOpt,
+ ReqInsertCount0, Base, EncAcc, Acc)
+ end;
+ DynIndex ->
+ ReqInsertCount = max(ReqInsertCount0, DynIndex),
+ encode(Tail, StreamID, State0, HuffmanOpt, ReqInsertCount, Base, EncAcc,
+ %% Indexed Field Line. T=0 (dynamic).
+ [enc_int6(Base - DynIndex - 1, 2#10)|Acc])
+ end.
+
+encode_static_name([Entry = {Name, Value}|Tail], StreamID, State0, HuffmanOpt,
+ ReqInsertCount0, Base, EncAcc, Acc) ->
+ case table_find_name_static(Name) of
+ not_found ->
+ encode_dyn_name([Entry|Tail], StreamID, State0, HuffmanOpt,
+ ReqInsertCount0, Base, EncAcc, Acc);
+ StaticNameIndex ->
+ case encode_can_insert(Entry, State0) of
+ {true, EncInstr, State1} ->
+ {ok, State} = table_insert(Entry, State1),
+ #state{insert_count=ReqInsertCount} = State,
+ PostBaseIndex = length(EncAcc),
+ encode(Tail, StreamID, State, HuffmanOpt, ReqInsertCount, Base,
+ %% Insert with Name Reference. T=1 (static).
+ [[EncInstr, enc_int6(StaticNameIndex, 2#11)|enc_str(Value, HuffmanOpt)]
+ |EncAcc],
+ %% Indexed Field Line with Post-Base Index.
+ [enc_int4(PostBaseIndex, 2#0001)|Acc]);
+ false ->
+ encode(Tail, StreamID, State0, HuffmanOpt, ReqInsertCount0, Base, EncAcc,
+ %% Literal Field Line with Name Reference. N=0. T=1 (static).
+ [[enc_int4(StaticNameIndex, 2#0101)|enc_str(Value, HuffmanOpt)]|Acc])
+ end
+ end.
+
+encode_dyn_name([Entry = {Name, Value}|Tail], StreamID,
+ State0=#state{draining_index=DrainingIndex},
+ HuffmanOpt, ReqInsertCount0, Base, EncAcc, Acc) ->
+ case table_find_name_dyn(Name, State0) of
+ %% We can reference the dynamic name.
+ DynIndex when is_integer(DynIndex), DynIndex >= DrainingIndex ->
+ case encode_can_insert(Entry, State0) of
+ {true, EncInstr, State1} ->
+ {ok, State} = table_insert(Entry, State1),
+ #state{insert_count=ReqInsertCount} = State,
+ %% See comment in encode_dyn for why we remove 2.
+ DynIndexRel = ReqInsertCount - DynIndex - 2,
+ PostBaseIndex = length(EncAcc),
+ encode(Tail, StreamID, State, HuffmanOpt, ReqInsertCount, Base,
+ %% Insert with Name Reference. T=0 (dynamic).
+ [[EncInstr, enc_int6(DynIndexRel, 2#10)|enc_str(Value, HuffmanOpt)]
+ |EncAcc],
+ %% Indexed Field Line with Post-Base Index.
+ [enc_int4(PostBaseIndex, 2#0001)|Acc]);
+ false ->
+ encode(Tail, StreamID, State0, HuffmanOpt, ReqInsertCount0, Base, EncAcc,
+ %% Literal Field Line with Name Reference. N=0. T=0 (dynamic).
+ [[enc_int4(DynIndex, 2#0100)|enc_str(Value, HuffmanOpt)]|Acc])
+ end;
+	%% When there is no name to reference, or the name
+ %% is found below the drain index, we do not attempt
+ %% to refer to it.
+ _ ->
+ case encode_can_insert(Entry, State0) of
+ {true, EncInstr, State1} ->
+ {ok, State} = table_insert(Entry, State1),
+ #state{insert_count=ReqInsertCount} = State,
+ PostBaseIndex = length(EncAcc),
+ encode(Tail, StreamID, State, HuffmanOpt, ReqInsertCount, Base,
+ %% Insert with Literal Name.
+ [[EncInstr, enc_str6(Name, HuffmanOpt, 2#01)|enc_str(Value, HuffmanOpt)]
+ |EncAcc],
+ %% Indexed Field Line with Post-Base Index.
+ [enc_int4(PostBaseIndex, 2#0001)|Acc]);
+ false ->
+ encode(Tail, StreamID, State0, HuffmanOpt, ReqInsertCount0, Base, EncAcc,
+ %% Literal Field Line with Literal Name. N=0.
+ [[enc_str4(Name, HuffmanOpt, 2#0010)|enc_str(Value, HuffmanOpt)]|Acc])
+ end
+ end.
+
+%% @todo We should make sure we have a large enough flow control window.
+%%
+%% We can never insert before receiving the SETTINGS frame.
+encode_can_insert(_, #state{settings_received=false}) ->
+ false;
+encode_can_insert({Name, Value}, State=#state{
+ max_table_capacity=MaxCapacity, capacity=Capacity,
+ size=Size, draining_size=DrainingSize}) ->
+ EntrySize = byte_size(Name) + byte_size(Value) + 32,
+ if
+ %% We have enough space in the current capacity,
+ %% without having to drain entries.
+ EntrySize + Size =< Capacity ->
+ {true, <<>>, State};
+ %% We have enough space if we increase the capacity.
+ %% We prefer to first increase the capacity to the
+ %% maximum before we start draining entries.
+ EntrySize + Size =< MaxCapacity ->
+ {true, enc_int5(MaxCapacity, 2#001),
+ State#state{capacity=MaxCapacity}};
+ %% We are already at max capacity and have enough
+ %% space if we drain entries.
+ EntrySize + Size =< Capacity + DrainingSize, Capacity =:= MaxCapacity ->
+ {true, <<>>, State};
+ %% We are not at max capacity. We have enough space
+ %% if we both increase the capacity and drain entries.
+ EntrySize + Size =< MaxCapacity + DrainingSize ->
+ {true, enc_int5(MaxCapacity, 2#001),
+ State#state{capacity=MaxCapacity}};
+ true ->
+ false
+ end.
+
+-spec execute_decoder_instructions(binary(), State)
+ -> {ok, State} | {connection_error, qpack_decoder_stream_error, atom()}
+ when State::state().
+execute_decoder_instructions(<<>>, State) ->
+ {ok, State};
+%% Section acknowledgement.
+%% We remove one reference and if needed increase the known received count.
+execute_decoder_instructions(<<2#1:1,Rest0/bits>>, State=#state{
+ known_received_count=KnownReceivedCount0, references=Refs}) ->
+ {StreamID, Rest} = dec_int7(Rest0),
+ case Refs of
+ #{StreamID := [InsertCount]} ->
+ KnownReceivedCount = max(KnownReceivedCount0, InsertCount),
+ execute_decoder_instructions(Rest, State#state{
+ known_received_count=KnownReceivedCount,
+ references=maps:remove(StreamID, Refs)});
+ #{StreamID := InsertCounts} ->
+ [InsertCount|InsertCountsTail] = lists:reverse(InsertCounts),
+ KnownReceivedCount = max(KnownReceivedCount0, InsertCount),
+ execute_decoder_instructions(Rest, State#state{
+ known_received_count=KnownReceivedCount,
+ references=Refs#{StreamID => lists:reverse(InsertCountsTail)}});
+ _ ->
+ {connection_error, qpack_decoder_stream_error,
+ 'Acknowledgement received for stream with no pending sections. (RFC9204 4.4.1)'}
+ end;
+%% Stream cancellation.
+%% We drop all references for the given stream.
+execute_decoder_instructions(<<2#01:2,Rest0/bits>>, State=#state{references=Refs}) ->
+ {StreamID, Rest} = dec_int6(Rest0),
+ case Refs of
+ #{StreamID := _} ->
+ execute_decoder_instructions(Rest, State#state{
+ references=maps:remove(StreamID, Refs)});
+ %% It is not an error for the reference to not exist.
+ %% The dynamic table may not have been used for this
+ %% stream.
+ _ ->
+ execute_decoder_instructions(Rest, State)
+ end;
+%% Insert count increment.
+%% We increase the known received count.
+execute_decoder_instructions(<<2#00:2,Rest0/bits>>, State=#state{
+ known_received_count=KnownReceivedCount}) ->
+ {Increment, Rest} = dec_int6(Rest0),
+ execute_decoder_instructions(Rest, State#state{
+ known_received_count=KnownReceivedCount + Increment}).
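+%% For example, in the Appendix B tests below, 16#84 acknowledges the field
+%% section sent on stream 4, 16#48 cancels stream 8 and 16#01 increments
+%% the known received count by 1.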
+
+%% Inform the encoder of the relevant SETTINGS from the decoder.
+%% The encoder will choose the smaller of the value it has
+%% configured and the value it received through SETTINGS. If a
+%% setting is absent from the SETTINGS frame, 0 must be given.
+
+-spec encoder_set_settings(non_neg_integer(), non_neg_integer(), state()) -> state().
+
+encoder_set_settings(MaxTableCapacity, MaxBlockedStreams, State=#state{
+ max_table_capacity=MaxTableCapacityConfigured,
+ max_blocked_streams=MaxBlockedStreamsConfigured}) ->
+ State#state{
+ settings_received=true,
+ max_table_capacity=min(MaxTableCapacity, MaxTableCapacityConfigured),
+ max_blocked_streams=min(MaxBlockedStreams, MaxBlockedStreamsConfigured)
+ }.
+
+huffman_opt(#{huffman := false}) -> no_huffman;
+huffman_opt(_) -> huffman.
+
+enc_int3(Int, Prefix) when Int < 7 ->
+ <<Prefix:5, Int:3>>;
+enc_int3(Int, Prefix) ->
+ enc_big_int(Int - 7, <<Prefix:5, 2#111:3>>).
+
+enc_int4(Int, Prefix) when Int < 15 ->
+ <<Prefix:4, Int:4>>;
+enc_int4(Int, Prefix) ->
+ enc_big_int(Int - 15, <<Prefix:4, 2#1111:4>>).
+
+enc_str4(Str, huffman, Prefix) ->
+ Str2 = enc_huffman(Str, <<>>),
+ [enc_int3(byte_size(Str2), Prefix * 2 + 2#1)|Str2];
+enc_str4(Str, no_huffman, Prefix) ->
+ [enc_int3(byte_size(Str), Prefix * 2 + 2#0)|Str].
+
+enc_str6(Str, huffman, Prefix) ->
+ Str2 = enc_huffman(Str, <<>>),
+ [enc_int5(byte_size(Str2), Prefix * 2 + 2#1)|Str2];
+enc_str6(Str, no_huffman, Prefix) ->
+ [enc_int5(byte_size(Str), Prefix * 2 + 2#0)|Str].
+
+-ifdef(TEST).
+%% This function is a good starting point to let the calling
+%% process insert entries in the dynamic table outside of
+%% encoding a field section. To be usable more broadly it
+%% would need to handle the case where a static name is
+%% found, and also consider how it should be used: is there
+%% capacity in the table? There is no capacity before the
+%% SETTINGS frame has been received. Until then this function
+%% is restricted to testing.
+encoder_insert_entry(Entry={Name, Value}, State0, Opts) ->
+ {ok, State} = table_insert(Entry, State0),
+ HuffmanOpt = huffman_opt(Opts),
+ case table_find_name_static(Name) of
+ not_found ->
+ case table_find_name_dyn(Name, State0) of
+ not_found ->
+ %% Insert with Literal Name.
+ {ok, [enc_str6(Name, HuffmanOpt, 2#01)|enc_str(Value, HuffmanOpt)], State};
+ DynNameIndex ->
+ #state{insert_count=ReqInsertCount} = State,
+ %% See comment in encode_dyn for why we remove 2.
+ DynNameIndexRel = ReqInsertCount - DynNameIndex - 2,
+ %% Insert with Name Reference. T=0 (dynamic).
+ {ok, [enc_int6(DynNameIndexRel, 2#10)|enc_str(Value, HuffmanOpt)], State}
+ end
+ end.
+
+appendix_b_encoder_test() ->
+ %% We limit the encoder to 220 bytes for table capacity.
+ EncState0 = init(encoder, 220, 0),
+ %% Stream: 0
+ {ok, Data1, EncData1, EncState1} = encode_field_section([
+ {<<":path">>, <<"/index.html">>}
+ ], 0, EncState0, #{huffman => false}),
+ <<>> = iolist_to_binary(EncData1),
+ <<
+ 16#0000:16,
+ 16#510b:16, 16#2f69:16, 16#6e64:16, 16#6578:16,
+ 16#2e68:16, 16#746d:16, 16#6c
+ >> = iolist_to_binary(Data1),
+ #state{
+ capacity=0,
+ size=0,
+ insert_count=0,
+ dyn_table=[]
+ } = EncState1,
+ %% Simulate receiving of the SETTINGS frame enabling the dynamic table.
+ EncState2 = encoder_set_settings(4096, 0, EncState1),
+ #state{
+ settings_received=true,
+ max_table_capacity=220,
+ capacity=0
+ } = EncState2,
+ %% Stream: 4 (and Encoder)
+ {ok, Data3, EncData3, EncState3} = encode_field_section([
+ {<<":authority">>, <<"www.example.com">>},
+ {<<":path">>, <<"/sample/path">>}
+ ], 4, EncState2, #{huffman => false}),
+ <<
+ 16#3fbd01:24,
+ 16#c00f:16, 16#7777:16, 16#772e:16, 16#6578:16,
+ 16#616d:16, 16#706c:16, 16#652e:16, 16#636f:16,
+ 16#6d,
+ 16#c10c:16, 16#2f73:16, 16#616d:16, 16#706c:16,
+ 16#652f:16, 16#7061:16, 16#7468:16
+ >> = iolist_to_binary(EncData3),
+ <<
+ 16#0381:16,
+ 16#10,
+ 16#11
+ >> = iolist_to_binary(Data3),
+ #state{
+ capacity=220,
+ size=106,
+ insert_count=2,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState3,
+ %% Stream: Decoder
+ {ok, EncState4} = execute_decoder_instructions(<<16#84>>, EncState3),
+ #state{
+ capacity=220,
+ size=106,
+ insert_count=2,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState4,
+ %% Stream: Encoder
+ {ok, EncData5, EncState5} = encoder_insert_entry(
+ {<<"custom-key">>, <<"custom-value">>},
+ EncState4, #{huffman => false}),
+ <<
+ 16#4a63:16, 16#7573:16, 16#746f:16, 16#6d2d:16,
+ 16#6b65:16, 16#790c:16, 16#6375:16, 16#7374:16,
+ 16#6f6d:16, 16#2d76:16, 16#616c:16, 16#7565:16
+ >> = iolist_to_binary(EncData5),
+ #state{
+ capacity=220,
+ size=160,
+ insert_count=3,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState5,
+ %% Stream: Decoder
+ {ok, EncState6} = execute_decoder_instructions(<<16#01>>, EncState5),
+ #state{
+ capacity=220,
+ size=160,
+ insert_count=3,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState6,
+ %% Stream: 8 (and Encoder)
+ {ok, Data7, EncData7, EncState7} = encode_field_section([
+ {<<":authority">>, <<"www.example.com">>},
+ {<<":path">>, <<"/">>},
+ {<<"custom-key">>, <<"custom-value">>}
+ ], 8, EncState6),
+ <<16#02>> = iolist_to_binary(EncData7),
+ <<
+ 16#0500:16,
+ 16#80,
+ 16#c1,
+ 16#81
+ >> = iolist_to_binary(Data7),
+ #state{
+ capacity=220,
+ size=217,
+ insert_count=4,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState7,
+ %% Stream: Decoder
+ {ok, EncState8} = execute_decoder_instructions(<<16#48>>, EncState7),
+ #state{
+ capacity=220,
+ size=217,
+ insert_count=4,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState8,
+ %% Stream: Encoder
+ {ok, EncData9, EncState9} = encoder_insert_entry(
+ {<<"custom-key">>, <<"custom-value2">>},
+ EncState8, #{huffman => false}),
+ <<
+ 16#810d:16, 16#6375:16, 16#7374:16, 16#6f6d:16,
+ 16#2d76:16, 16#616c:16, 16#7565:16, 16#32
+ >> = iolist_to_binary(EncData9),
+ #state{
+ capacity=220,
+ size=215,
+ insert_count=5,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {55, {<<"custom-key">>, <<"custom-value2">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}}
+ ]
+ } = EncState9,
+ ok.
+-endif.
+
+%% Static and dynamic tables.
+
+table_find_static({<<":authority">>, <<>>}) -> 0;
+table_find_static({<<":path">>, <<"/">>}) -> 1;
+table_find_static({<<"age">>, <<"0">>}) -> 2;
+table_find_static({<<"content-disposition">>, <<>>}) -> 3;
+table_find_static({<<"content-length">>, <<"0">>}) -> 4;
+table_find_static({<<"cookie">>, <<>>}) -> 5;
+table_find_static({<<"date">>, <<>>}) -> 6;
+table_find_static({<<"etag">>, <<>>}) -> 7;
+table_find_static({<<"if-modified-since">>, <<>>}) -> 8;
+table_find_static({<<"if-none-match">>, <<>>}) -> 9;
+table_find_static({<<"last-modified">>, <<>>}) -> 10;
+table_find_static({<<"link">>, <<>>}) -> 11;
+table_find_static({<<"location">>, <<>>}) -> 12;
+table_find_static({<<"referer">>, <<>>}) -> 13;
+table_find_static({<<"set-cookie">>, <<>>}) -> 14;
+table_find_static({<<":method">>, <<"CONNECT">>}) -> 15;
+table_find_static({<<":method">>, <<"DELETE">>}) -> 16;
+table_find_static({<<":method">>, <<"GET">>}) -> 17;
+table_find_static({<<":method">>, <<"HEAD">>}) -> 18;
+table_find_static({<<":method">>, <<"OPTIONS">>}) -> 19;
+table_find_static({<<":method">>, <<"POST">>}) -> 20;
+table_find_static({<<":method">>, <<"PUT">>}) -> 21;
+table_find_static({<<":scheme">>, <<"http">>}) -> 22;
+table_find_static({<<":scheme">>, <<"https">>}) -> 23;
+table_find_static({<<":status">>, <<"103">>}) -> 24;
+table_find_static({<<":status">>, <<"200">>}) -> 25;
+table_find_static({<<":status">>, <<"304">>}) -> 26;
+table_find_static({<<":status">>, <<"404">>}) -> 27;
+table_find_static({<<":status">>, <<"503">>}) -> 28;
+table_find_static({<<"accept">>, <<"*/*">>}) -> 29;
+table_find_static({<<"accept">>, <<"application/dns-message">>}) -> 30;
+table_find_static({<<"accept-encoding">>, <<"gzip, deflate, br">>}) -> 31;
+table_find_static({<<"accept-ranges">>, <<"bytes">>}) -> 32;
+table_find_static({<<"access-control-allow-headers">>, <<"cache-control">>}) -> 33;
+table_find_static({<<"access-control-allow-headers">>, <<"content-type">>}) -> 34;
+table_find_static({<<"access-control-allow-origin">>, <<"*">>}) -> 35;
+table_find_static({<<"cache-control">>, <<"max-age=0">>}) -> 36;
+table_find_static({<<"cache-control">>, <<"max-age=2592000">>}) -> 37;
+table_find_static({<<"cache-control">>, <<"max-age=604800">>}) -> 38;
+table_find_static({<<"cache-control">>, <<"no-cache">>}) -> 39;
+table_find_static({<<"cache-control">>, <<"no-store">>}) -> 40;
+table_find_static({<<"cache-control">>, <<"public, max-age=31536000">>}) -> 41;
+table_find_static({<<"content-encoding">>, <<"br">>}) -> 42;
+table_find_static({<<"content-encoding">>, <<"gzip">>}) -> 43;
+table_find_static({<<"content-type">>, <<"application/dns-message">>}) -> 44;
+table_find_static({<<"content-type">>, <<"application/javascript">>}) -> 45;
+table_find_static({<<"content-type">>, <<"application/json">>}) -> 46;
+table_find_static({<<"content-type">>, <<"application/x-www-form-urlencoded">>}) -> 47;
+table_find_static({<<"content-type">>, <<"image/gif">>}) -> 48;
+table_find_static({<<"content-type">>, <<"image/jpeg">>}) -> 49;
+table_find_static({<<"content-type">>, <<"image/png">>}) -> 50;
+table_find_static({<<"content-type">>, <<"text/css">>}) -> 51;
+table_find_static({<<"content-type">>, <<"text/html; charset=utf-8">>}) -> 52;
+table_find_static({<<"content-type">>, <<"text/plain">>}) -> 53;
+table_find_static({<<"content-type">>, <<"text/plain;charset=utf-8">>}) -> 54;
+table_find_static({<<"range">>, <<"bytes=0-">>}) -> 55;
+table_find_static({<<"strict-transport-security">>, <<"max-age=31536000">>}) -> 56;
+table_find_static({<<"strict-transport-security">>, <<"max-age=31536000; includesubdomains">>}) -> 57;
+table_find_static({<<"strict-transport-security">>, <<"max-age=31536000; includesubdomains; preload">>}) -> 58;
+table_find_static({<<"vary">>, <<"accept-encoding">>}) -> 59;
+table_find_static({<<"vary">>, <<"origin">>}) -> 60;
+table_find_static({<<"x-content-type-options">>, <<"nosniff">>}) -> 61;
+table_find_static({<<"x-xss-protection">>, <<"1; mode=block">>}) -> 62;
+table_find_static({<<":status">>, <<"100">>}) -> 63;
+table_find_static({<<":status">>, <<"204">>}) -> 64;
+table_find_static({<<":status">>, <<"206">>}) -> 65;
+table_find_static({<<":status">>, <<"302">>}) -> 66;
+table_find_static({<<":status">>, <<"400">>}) -> 67;
+table_find_static({<<":status">>, <<"403">>}) -> 68;
+table_find_static({<<":status">>, <<"421">>}) -> 69;
+table_find_static({<<":status">>, <<"425">>}) -> 70;
+table_find_static({<<":status">>, <<"500">>}) -> 71;
+table_find_static({<<"accept-language">>, <<>>}) -> 72;
+%% These two values are technically invalid. An erratum has already
+%% been submitted to the RFC. We must however continue to include
+%% them in the table for compatibility.
+table_find_static({<<"access-control-allow-credentials">>, <<"FALSE">>}) -> 73;
+table_find_static({<<"access-control-allow-credentials">>, <<"TRUE">>}) -> 74;
+table_find_static({<<"access-control-allow-headers">>, <<"*">>}) -> 75;
+table_find_static({<<"access-control-allow-methods">>, <<"get">>}) -> 76;
+table_find_static({<<"access-control-allow-methods">>, <<"get, post, options">>}) -> 77;
+table_find_static({<<"access-control-allow-methods">>, <<"options">>}) -> 78;
+table_find_static({<<"access-control-expose-headers">>, <<"content-length">>}) -> 79;
+table_find_static({<<"access-control-request-headers">>, <<"content-type">>}) -> 80;
+table_find_static({<<"access-control-request-method">>, <<"get">>}) -> 81;
+table_find_static({<<"access-control-request-method">>, <<"post">>}) -> 82;
+table_find_static({<<"alt-svc">>, <<"clear">>}) -> 83;
+table_find_static({<<"authorization">>, <<>>}) -> 84;
+table_find_static({<<"content-security-policy">>, <<"script-src 'none'; object-src 'none'; base-uri 'none'">>}) -> 85;
+table_find_static({<<"early-data">>, <<"1">>}) -> 86;
+table_find_static({<<"expect-ct">>, <<>>}) -> 87;
+table_find_static({<<"forwarded">>, <<>>}) -> 88;
+table_find_static({<<"if-range">>, <<>>}) -> 89;
+table_find_static({<<"origin">>, <<>>}) -> 90;
+table_find_static({<<"purpose">>, <<"prefetch">>}) -> 91;
+table_find_static({<<"server">>, <<>>}) -> 92;
+table_find_static({<<"timing-allow-origin">>, <<"*">>}) -> 93;
+table_find_static({<<"upgrade-insecure-requests">>, <<"1">>}) -> 94;
+table_find_static({<<"user-agent">>, <<>>}) -> 95;
+table_find_static({<<"x-forwarded-for">>, <<>>}) -> 96;
+table_find_static({<<"x-frame-options">>, <<"deny">>}) -> 97;
+table_find_static({<<"x-frame-options">>, <<"sameorigin">>}) -> 98;
+table_find_static(_) -> not_found.
+
+table_find_name_static(<<":authority">>) -> 0;
+table_find_name_static(<<":path">>) -> 1;
+table_find_name_static(<<"age">>) -> 2;
+table_find_name_static(<<"content-disposition">>) -> 3;
+table_find_name_static(<<"content-length">>) -> 4;
+table_find_name_static(<<"cookie">>) -> 5;
+table_find_name_static(<<"date">>) -> 6;
+table_find_name_static(<<"etag">>) -> 7;
+table_find_name_static(<<"if-modified-since">>) -> 8;
+table_find_name_static(<<"if-none-match">>) -> 9;
+table_find_name_static(<<"last-modified">>) -> 10;
+table_find_name_static(<<"link">>) -> 11;
+table_find_name_static(<<"location">>) -> 12;
+table_find_name_static(<<"referer">>) -> 13;
+table_find_name_static(<<"set-cookie">>) -> 14;
+table_find_name_static(<<":method">>) -> 15;
+table_find_name_static(<<":scheme">>) -> 22;
+table_find_name_static(<<":status">>) -> 24;
+table_find_name_static(<<"accept">>) -> 29;
+table_find_name_static(<<"accept-encoding">>) -> 31;
+table_find_name_static(<<"accept-ranges">>) -> 32;
+table_find_name_static(<<"access-control-allow-headers">>) -> 33;
+table_find_name_static(<<"access-control-allow-origin">>) -> 35;
+table_find_name_static(<<"cache-control">>) -> 36;
+table_find_name_static(<<"content-encoding">>) -> 42;
+table_find_name_static(<<"content-type">>) -> 44;
+table_find_name_static(<<"range">>) -> 55;
+table_find_name_static(<<"strict-transport-security">>) -> 56;
+table_find_name_static(<<"vary">>) -> 59;
+table_find_name_static(<<"x-content-type-options">>) -> 61;
+table_find_name_static(<<"x-xss-protection">>) -> 62;
+table_find_name_static(<<"accept-language">>) -> 72;
+table_find_name_static(<<"access-control-allow-credentials">>) -> 73;
+table_find_name_static(<<"access-control-allow-methods">>) -> 76;
+table_find_name_static(<<"access-control-expose-headers">>) -> 79;
+table_find_name_static(<<"access-control-request-headers">>) -> 80;
+table_find_name_static(<<"access-control-request-method">>) -> 81;
+table_find_name_static(<<"alt-svc">>) -> 83;
+table_find_name_static(<<"authorization">>) -> 84;
+table_find_name_static(<<"content-security-policy">>) -> 85;
+table_find_name_static(<<"early-data">>) -> 86;
+table_find_name_static(<<"expect-ct">>) -> 87;
+table_find_name_static(<<"forwarded">>) -> 88;
+table_find_name_static(<<"if-range">>) -> 89;
+table_find_name_static(<<"origin">>) -> 90;
+table_find_name_static(<<"purpose">>) -> 91;
+table_find_name_static(<<"server">>) -> 92;
+table_find_name_static(<<"timing-allow-origin">>) -> 93;
+table_find_name_static(<<"upgrade-insecure-requests">>) -> 94;
+table_find_name_static(<<"user-agent">>) -> 95;
+table_find_name_static(<<"x-forwarded-for">>) -> 96;
+table_find_name_static(<<"x-frame-options">>) -> 97;
+table_find_name_static(_) -> not_found.
+
+table_get_static(0) -> {<<":authority">>, <<>>};
+table_get_static(1) -> {<<":path">>, <<"/">>};
+table_get_static(2) -> {<<"age">>, <<"0">>};
+table_get_static(3) -> {<<"content-disposition">>, <<>>};
+table_get_static(4) -> {<<"content-length">>, <<"0">>};
+table_get_static(5) -> {<<"cookie">>, <<>>};
+table_get_static(6) -> {<<"date">>, <<>>};
+table_get_static(7) -> {<<"etag">>, <<>>};
+table_get_static(8) -> {<<"if-modified-since">>, <<>>};
+table_get_static(9) -> {<<"if-none-match">>, <<>>};
+table_get_static(10) -> {<<"last-modified">>, <<>>};
+table_get_static(11) -> {<<"link">>, <<>>};
+table_get_static(12) -> {<<"location">>, <<>>};
+table_get_static(13) -> {<<"referer">>, <<>>};
+table_get_static(14) -> {<<"set-cookie">>, <<>>};
+table_get_static(15) -> {<<":method">>, <<"CONNECT">>};
+table_get_static(16) -> {<<":method">>, <<"DELETE">>};
+table_get_static(17) -> {<<":method">>, <<"GET">>};
+table_get_static(18) -> {<<":method">>, <<"HEAD">>};
+table_get_static(19) -> {<<":method">>, <<"OPTIONS">>};
+table_get_static(20) -> {<<":method">>, <<"POST">>};
+table_get_static(21) -> {<<":method">>, <<"PUT">>};
+table_get_static(22) -> {<<":scheme">>, <<"http">>};
+table_get_static(23) -> {<<":scheme">>, <<"https">>};
+table_get_static(24) -> {<<":status">>, <<"103">>};
+table_get_static(25) -> {<<":status">>, <<"200">>};
+table_get_static(26) -> {<<":status">>, <<"304">>};
+table_get_static(27) -> {<<":status">>, <<"404">>};
+table_get_static(28) -> {<<":status">>, <<"503">>};
+table_get_static(29) -> {<<"accept">>, <<"*/*">>};
+table_get_static(30) -> {<<"accept">>, <<"application/dns-message">>};
+table_get_static(31) -> {<<"accept-encoding">>, <<"gzip, deflate, br">>};
+table_get_static(32) -> {<<"accept-ranges">>, <<"bytes">>};
+table_get_static(33) -> {<<"access-control-allow-headers">>, <<"cache-control">>};
+table_get_static(34) -> {<<"access-control-allow-headers">>, <<"content-type">>};
+table_get_static(35) -> {<<"access-control-allow-origin">>, <<"*">>};
+table_get_static(36) -> {<<"cache-control">>, <<"max-age=0">>};
+table_get_static(37) -> {<<"cache-control">>, <<"max-age=2592000">>};
+table_get_static(38) -> {<<"cache-control">>, <<"max-age=604800">>};
+table_get_static(39) -> {<<"cache-control">>, <<"no-cache">>};
+table_get_static(40) -> {<<"cache-control">>, <<"no-store">>};
+table_get_static(41) -> {<<"cache-control">>, <<"public, max-age=31536000">>};
+table_get_static(42) -> {<<"content-encoding">>, <<"br">>};
+table_get_static(43) -> {<<"content-encoding">>, <<"gzip">>};
+table_get_static(44) -> {<<"content-type">>, <<"application/dns-message">>};
+table_get_static(45) -> {<<"content-type">>, <<"application/javascript">>};
+table_get_static(46) -> {<<"content-type">>, <<"application/json">>};
+table_get_static(47) -> {<<"content-type">>, <<"application/x-www-form-urlencoded">>};
+table_get_static(48) -> {<<"content-type">>, <<"image/gif">>};
+table_get_static(49) -> {<<"content-type">>, <<"image/jpeg">>};
+table_get_static(50) -> {<<"content-type">>, <<"image/png">>};
+table_get_static(51) -> {<<"content-type">>, <<"text/css">>};
+table_get_static(52) -> {<<"content-type">>, <<"text/html; charset=utf-8">>};
+table_get_static(53) -> {<<"content-type">>, <<"text/plain">>};
+table_get_static(54) -> {<<"content-type">>, <<"text/plain;charset=utf-8">>};
+table_get_static(55) -> {<<"range">>, <<"bytes=0-">>};
+table_get_static(56) -> {<<"strict-transport-security">>, <<"max-age=31536000">>};
+table_get_static(57) -> {<<"strict-transport-security">>, <<"max-age=31536000; includesubdomains">>};
+table_get_static(58) -> {<<"strict-transport-security">>, <<"max-age=31536000; includesubdomains; preload">>};
+table_get_static(59) -> {<<"vary">>, <<"accept-encoding">>};
+table_get_static(60) -> {<<"vary">>, <<"origin">>};
+table_get_static(61) -> {<<"x-content-type-options">>, <<"nosniff">>};
+table_get_static(62) -> {<<"x-xss-protection">>, <<"1; mode=block">>};
+table_get_static(63) -> {<<":status">>, <<"100">>};
+table_get_static(64) -> {<<":status">>, <<"204">>};
+table_get_static(65) -> {<<":status">>, <<"206">>};
+table_get_static(66) -> {<<":status">>, <<"302">>};
+table_get_static(67) -> {<<":status">>, <<"400">>};
+table_get_static(68) -> {<<":status">>, <<"403">>};
+table_get_static(69) -> {<<":status">>, <<"421">>};
+table_get_static(70) -> {<<":status">>, <<"425">>};
+table_get_static(71) -> {<<":status">>, <<"500">>};
+table_get_static(72) -> {<<"accept-language">>, <<>>};
+%% These two values are technically invalid. An erratum has already
+%% been submitted to the RFC. We must however continue to include
+%% them in the table for compatibility.
+table_get_static(73) -> {<<"access-control-allow-credentials">>, <<"FALSE">>};
+table_get_static(74) -> {<<"access-control-allow-credentials">>, <<"TRUE">>};
+table_get_static(75) -> {<<"access-control-allow-headers">>, <<"*">>};
+table_get_static(76) -> {<<"access-control-allow-methods">>, <<"get">>};
+table_get_static(77) -> {<<"access-control-allow-methods">>, <<"get, post, options">>};
+table_get_static(78) -> {<<"access-control-allow-methods">>, <<"options">>};
+table_get_static(79) -> {<<"access-control-expose-headers">>, <<"content-length">>};
+table_get_static(80) -> {<<"access-control-request-headers">>, <<"content-type">>};
+table_get_static(81) -> {<<"access-control-request-method">>, <<"get">>};
+table_get_static(82) -> {<<"access-control-request-method">>, <<"post">>};
+table_get_static(83) -> {<<"alt-svc">>, <<"clear">>};
+table_get_static(84) -> {<<"authorization">>, <<>>};
+table_get_static(85) -> {<<"content-security-policy">>, <<"script-src 'none'; object-src 'none'; base-uri 'none'">>};
+table_get_static(86) -> {<<"early-data">>, <<"1">>};
+table_get_static(87) -> {<<"expect-ct">>, <<>>};
+table_get_static(88) -> {<<"forwarded">>, <<>>};
+table_get_static(89) -> {<<"if-range">>, <<>>};
+table_get_static(90) -> {<<"origin">>, <<>>};
+table_get_static(91) -> {<<"purpose">>, <<"prefetch">>};
+table_get_static(92) -> {<<"server">>, <<>>};
+table_get_static(93) -> {<<"timing-allow-origin">>, <<"*">>};
+table_get_static(94) -> {<<"upgrade-insecure-requests">>, <<"1">>};
+table_get_static(95) -> {<<"user-agent">>, <<>>};
+table_get_static(96) -> {<<"x-forwarded-for">>, <<>>};
+table_get_static(97) -> {<<"x-frame-options">>, <<"deny">>};
+table_get_static(98) -> {<<"x-frame-options">>, <<"sameorigin">>}.
+
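+%% Same as table_get_static/1 but only returns the header field name,
+%% for references that use a static entry's name with a literal value.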
+table_get_name_static(0) -> <<":authority">>;
+table_get_name_static(1) -> <<":path">>;
+table_get_name_static(2) -> <<"age">>;
+table_get_name_static(3) -> <<"content-disposition">>;
+table_get_name_static(4) -> <<"content-length">>;
+table_get_name_static(5) -> <<"cookie">>;
+table_get_name_static(6) -> <<"date">>;
+table_get_name_static(7) -> <<"etag">>;
+table_get_name_static(8) -> <<"if-modified-since">>;
+table_get_name_static(9) -> <<"if-none-match">>;
+table_get_name_static(10) -> <<"last-modified">>;
+table_get_name_static(11) -> <<"link">>;
+table_get_name_static(12) -> <<"location">>;
+table_get_name_static(13) -> <<"referer">>;
+table_get_name_static(14) -> <<"set-cookie">>;
+table_get_name_static(15) -> <<":method">>;
+table_get_name_static(16) -> <<":method">>;
+table_get_name_static(17) -> <<":method">>;
+table_get_name_static(18) -> <<":method">>;
+table_get_name_static(19) -> <<":method">>;
+table_get_name_static(20) -> <<":method">>;
+table_get_name_static(21) -> <<":method">>;
+table_get_name_static(22) -> <<":scheme">>;
+table_get_name_static(23) -> <<":scheme">>;
+table_get_name_static(24) -> <<":status">>;
+table_get_name_static(25) -> <<":status">>;
+table_get_name_static(26) -> <<":status">>;
+table_get_name_static(27) -> <<":status">>;
+table_get_name_static(28) -> <<":status">>;
+table_get_name_static(29) -> <<"accept">>;
+table_get_name_static(30) -> <<"accept">>;
+table_get_name_static(31) -> <<"accept-encoding">>;
+table_get_name_static(32) -> <<"accept-ranges">>;
+table_get_name_static(33) -> <<"access-control-allow-headers">>;
+table_get_name_static(34) -> <<"access-control-allow-headers">>;
+table_get_name_static(35) -> <<"access-control-allow-origin">>;
+table_get_name_static(36) -> <<"cache-control">>;
+table_get_name_static(37) -> <<"cache-control">>;
+table_get_name_static(38) -> <<"cache-control">>;
+table_get_name_static(39) -> <<"cache-control">>;
+table_get_name_static(40) -> <<"cache-control">>;
+table_get_name_static(41) -> <<"cache-control">>;
+table_get_name_static(42) -> <<"content-encoding">>;
+table_get_name_static(43) -> <<"content-encoding">>;
+table_get_name_static(44) -> <<"content-type">>;
+table_get_name_static(45) -> <<"content-type">>;
+table_get_name_static(46) -> <<"content-type">>;
+table_get_name_static(47) -> <<"content-type">>;
+table_get_name_static(48) -> <<"content-type">>;
+table_get_name_static(49) -> <<"content-type">>;
+table_get_name_static(50) -> <<"content-type">>;
+table_get_name_static(51) -> <<"content-type">>;
+table_get_name_static(52) -> <<"content-type">>;
+table_get_name_static(53) -> <<"content-type">>;
+table_get_name_static(54) -> <<"content-type">>;
+table_get_name_static(55) -> <<"range">>;
+table_get_name_static(56) -> <<"strict-transport-security">>;
+table_get_name_static(57) -> <<"strict-transport-security">>;
+table_get_name_static(58) -> <<"strict-transport-security">>;
+table_get_name_static(59) -> <<"vary">>;
+table_get_name_static(60) -> <<"vary">>;
+table_get_name_static(61) -> <<"x-content-type-options">>;
+table_get_name_static(62) -> <<"x-xss-protection">>;
+table_get_name_static(63) -> <<":status">>;
+table_get_name_static(64) -> <<":status">>;
+table_get_name_static(65) -> <<":status">>;
+table_get_name_static(66) -> <<":status">>;
+table_get_name_static(67) -> <<":status">>;
+table_get_name_static(68) -> <<":status">>;
+table_get_name_static(69) -> <<":status">>;
+table_get_name_static(70) -> <<":status">>;
+table_get_name_static(71) -> <<":status">>;
+table_get_name_static(72) -> <<"accept-language">>;
+table_get_name_static(73) -> <<"access-control-allow-credentials">>;
+table_get_name_static(74) -> <<"access-control-allow-credentials">>;
+table_get_name_static(75) -> <<"access-control-allow-headers">>;
+table_get_name_static(76) -> <<"access-control-allow-methods">>;
+table_get_name_static(77) -> <<"access-control-allow-methods">>;
+table_get_name_static(78) -> <<"access-control-allow-methods">>;
+table_get_name_static(79) -> <<"access-control-expose-headers">>;
+table_get_name_static(80) -> <<"access-control-request-headers">>;
+table_get_name_static(81) -> <<"access-control-request-method">>;
+table_get_name_static(82) -> <<"access-control-request-method">>;
+table_get_name_static(83) -> <<"alt-svc">>;
+table_get_name_static(84) -> <<"authorization">>;
+table_get_name_static(85) -> <<"content-security-policy">>;
+table_get_name_static(86) -> <<"early-data">>;
+table_get_name_static(87) -> <<"expect-ct">>;
+table_get_name_static(88) -> <<"forwarded">>;
+table_get_name_static(89) -> <<"if-range">>;
+table_get_name_static(90) -> <<"origin">>;
+table_get_name_static(91) -> <<"purpose">>;
+table_get_name_static(92) -> <<"server">>;
+table_get_name_static(93) -> <<"timing-allow-origin">>;
+table_get_name_static(94) -> <<"upgrade-insecure-requests">>;
+table_get_name_static(95) -> <<"user-agent">>;
+table_get_name_static(96) -> <<"x-forwarded-for">>;
+table_get_name_static(97) -> <<"x-frame-options">>;
+table_get_name_static(98) -> <<"x-frame-options">>.
+
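+%% Insert an entry into the dynamic table. The size of an entry is the
+%% length of its name plus the length of its value plus 32 bytes of
+%% overhead (RFC9204 3.2.1). When the new entry does not fit, the oldest
+%% entries are evicted first; an entry larger than the table capacity
+%% itself is a connection error (RFC9204 3.2.2).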
+table_insert(Entry={Name, Value}, State=#state{capacity=Capacity,
+ size=Size0, insert_count=InsertCount, dyn_table=DynamicTable0,
+ draining_size=DrainingSize}) ->
+ EntrySize = byte_size(Name) + byte_size(Value) + 32,
+ if
+ EntrySize + Size0 =< Capacity ->
+ {ok, State#state{size=Size0 + EntrySize, insert_count=InsertCount + 1,
+ dyn_table=[{EntrySize, Entry}|DynamicTable0]}};
+ EntrySize =< Capacity ->
+ {DynamicTable, Size} = table_evict(DynamicTable0,
+ Capacity - EntrySize, 0, []),
+ {ok, State#state{size=Size + EntrySize, insert_count=InsertCount + 1,
+ dyn_table=[{EntrySize, Entry}|DynamicTable],
+				%% We reduce the draining size by the amount of space freed by the eviction.
+ draining_size=DrainingSize - (Size0 - Size)}};
+		true -> % EntrySize > Capacity
+ {connection_error, qpack_encoder_stream_error,
+ 'Entry size larger than table capacity. (RFC9204 3.2.2)'}
+ end.
+
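+%% Walk the newest-first dynamic table keeping entries while their
+%% cumulative size stays within MaxSize, dropping the remaining (oldest)
+%% entries. Returns the retained entries and their total size.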
+table_evict([], _, Size, Acc) ->
+ {lists:reverse(Acc), Size};
+table_evict([{EntrySize, _}|_], MaxSize, Size, Acc)
+ when Size + EntrySize > MaxSize ->
+ {lists:reverse(Acc), Size};
+table_evict([Entry = {EntrySize, _}|Tail], MaxSize, Size, Acc) ->
+ table_evict(Tail, MaxSize, Size + EntrySize, [Entry|Acc]).
+
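+%% Find an exact {Name, Value} match in the dynamic table and return its
+%% absolute index (the most recently inserted entry has absolute index
+%% InsertCount - 1), or not_found.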
+table_find_dyn(Entry, #state{insert_count=InsertCount, dyn_table=DynamicTable}) ->
+ table_find_dyn(Entry, DynamicTable, InsertCount - 1).
+
+table_find_dyn(_, [], _) ->
+ not_found;
+table_find_dyn(Entry, [{_, Entry}|_], Index) ->
+ Index;
+table_find_dyn(Entry, [_|Tail], Index) ->
+ table_find_dyn(Entry, Tail, Index - 1).
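+%% Same as table_find_dyn/2 but matches on the header field name only.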
+
+table_find_name_dyn(Name, #state{insert_count=InsertCount, dyn_table=DynamicTable}) ->
+ table_find_name_dyn(Name, DynamicTable, InsertCount - 1).
+
+table_find_name_dyn(_, [], _) ->
+ not_found;
+table_find_name_dyn(Name, [{_, {Name, _}}|_], Index) ->
+ Index;
+table_find_name_dyn(Name, [_|Tail], Index) ->
+ table_find_name_dyn(Name, Tail, Index - 1).
+
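+%% The dynamic table is stored newest-first. Absolute indices start at 0
+%% with the first inserted entry and increase with each insertion.
+%% Relative indices count backwards from the most recent insertion
+%% (encoder instructions) or from the Base (field line representations),
+%% while post-Base indices count forwards from the Base
+%% (RFC9204 3.2.4 to 3.2.6).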
+%% @todo These functions may error out if the peer's encoder sends
+%% invalid references (RFC9204 2.2.3. Invalid References).
+table_get_dyn_abs(Index, #state{insert_count=InsertCount, dyn_table=DynamicTable}) ->
+ {_, Header} = lists:nth(InsertCount - Index, DynamicTable),
+ Header.
+
+table_get_dyn_rel(Index, #state{dyn_table=DynamicTable}) ->
+ {_, Header} = lists:nth(1 + Index, DynamicTable),
+ Header.
+
+table_get_name_dyn_rel(Index, State) ->
+ {Name, _} = table_get_dyn_rel(Index, State),
+ Name.
+
+table_get_dyn_pre_base(Index, Base, #state{insert_count=InsertCount, dyn_table=DynamicTable}) ->
+ BaseOffset = InsertCount - Base,
+ {_, Header} = lists:nth(1 + Index + BaseOffset, DynamicTable),
+ Header.
+
+table_get_dyn_post_base(Index, Base, State) ->
+ table_get_dyn_abs(Base + Index, State).
+
+table_get_name_dyn_post_base(Index, Base, State) ->
+ {Name, _} = table_get_dyn_abs(Base + Index, State),
+ Name.
+
+-ifdef(TEST).
+do_init() ->
+ #state{
+ settings_received=false,
+ max_table_capacity=1000,
+ capacity=1000
+ }.
+
+do_table_insert(Entry, State0) ->
+ {ok, State} = table_insert(Entry, State0),
+ State.
+
+table_get_dyn_abs_test() ->
+ State0 = do_init(),
+ State1 = do_table_insert({<<"g">>, <<"h">>},
+ do_table_insert({<<"e">>, <<"f">>},
+ do_table_insert({<<"c">>, <<"d">>},
+ do_table_insert({<<"a">>, <<"b">>},
+ State0)))),
+ {<<"a">>, <<"b">>} = table_get_dyn_abs(0, State1),
+ {<<"c">>, <<"d">>} = table_get_dyn_abs(1, State1),
+ {<<"e">>, <<"f">>} = table_get_dyn_abs(2, State1),
+ {<<"g">>, <<"h">>} = table_get_dyn_abs(3, State1),
+ %% Evict one member from the table.
+ #state{dyn_table=DynamicTable} = State1,
+ State2 = State1#state{dyn_table=lists:reverse(tl(lists:reverse(DynamicTable)))},
+ {<<"c">>, <<"d">>} = table_get_dyn_abs(1, State2),
+ {<<"e">>, <<"f">>} = table_get_dyn_abs(2, State2),
+ {<<"g">>, <<"h">>} = table_get_dyn_abs(3, State2),
+ ok.
+
+table_get_dyn_rel_test() ->
+ State0 = do_init(),
+ State1 = do_table_insert({<<"g">>, <<"h">>},
+ do_table_insert({<<"e">>, <<"f">>},
+ do_table_insert({<<"c">>, <<"d">>},
+ do_table_insert({<<"a">>, <<"b">>},
+ State0)))),
+ {<<"g">>, <<"h">>} = table_get_dyn_rel(0, State1),
+ {<<"e">>, <<"f">>} = table_get_dyn_rel(1, State1),
+ {<<"c">>, <<"d">>} = table_get_dyn_rel(2, State1),
+ {<<"a">>, <<"b">>} = table_get_dyn_rel(3, State1),
+ %% Evict one member from the table.
+ #state{dyn_table=DynamicTable} = State1,
+ State2 = State1#state{dyn_table=lists:reverse(tl(lists:reverse(DynamicTable)))},
+ {<<"g">>, <<"h">>} = table_get_dyn_rel(0, State2),
+ {<<"e">>, <<"f">>} = table_get_dyn_rel(1, State2),
+ {<<"c">>, <<"d">>} = table_get_dyn_rel(2, State2),
+ %% Add a member to the table.
+ State3 = do_table_insert({<<"i">>, <<"j">>}, State2),
+ {<<"i">>, <<"j">>} = table_get_dyn_rel(0, State3),
+ {<<"g">>, <<"h">>} = table_get_dyn_rel(1, State3),
+ {<<"e">>, <<"f">>} = table_get_dyn_rel(2, State3),
+ {<<"c">>, <<"d">>} = table_get_dyn_rel(3, State3),
+ ok.
+
+table_get_dyn_pre_base_test() ->
+ State0 = do_init(),
+ State1 = do_table_insert({<<"g">>, <<"h">>},
+ do_table_insert({<<"e">>, <<"f">>},
+ do_table_insert({<<"c">>, <<"d">>},
+ do_table_insert({<<"a">>, <<"b">>},
+ State0)))),
+ {<<"e">>, <<"f">>} = table_get_dyn_pre_base(0, 3, State1),
+ {<<"c">>, <<"d">>} = table_get_dyn_pre_base(1, 3, State1),
+ {<<"a">>, <<"b">>} = table_get_dyn_pre_base(2, 3, State1),
+ %% Evict one member from the table.
+ #state{dyn_table=DynamicTable} = State1,
+ State2 = State1#state{dyn_table=lists:reverse(tl(lists:reverse(DynamicTable)))},
+ {<<"e">>, <<"f">>} = table_get_dyn_pre_base(0, 3, State2),
+ {<<"c">>, <<"d">>} = table_get_dyn_pre_base(1, 3, State2),
+ %% Add a member to the table.
+ State3 = do_table_insert({<<"i">>, <<"j">>}, State2),
+ {<<"e">>, <<"f">>} = table_get_dyn_pre_base(0, 3, State3),
+ {<<"c">>, <<"d">>} = table_get_dyn_pre_base(1, 3, State3),
+ ok.
+
+table_get_dyn_post_base_test() ->
+ State0 = do_init(),
+ State1 = do_table_insert({<<"g">>, <<"h">>},
+ do_table_insert({<<"e">>, <<"f">>},
+ do_table_insert({<<"c">>, <<"d">>},
+ do_table_insert({<<"a">>, <<"b">>},
+ State0)))),
+ {<<"e">>, <<"f">>} = table_get_dyn_post_base(0, 2, State1),
+ {<<"g">>, <<"h">>} = table_get_dyn_post_base(1, 2, State1),
+ %% Evict one member from the table.
+ #state{dyn_table=DynamicTable} = State1,
+ State2 = State1#state{dyn_table=lists:reverse(tl(lists:reverse(DynamicTable)))},
+ {<<"e">>, <<"f">>} = table_get_dyn_post_base(0, 2, State2),
+ {<<"g">>, <<"h">>} = table_get_dyn_post_base(1, 2, State2),
+ %% Add a member to the table.
+ State3 = do_table_insert({<<"i">>, <<"j">>}, State2),
+ {<<"e">>, <<"f">>} = table_get_dyn_post_base(0, 2, State3),
+ {<<"g">>, <<"h">>} = table_get_dyn_post_base(1, 2, State3),
+ {<<"i">>, <<"j">>} = table_get_dyn_post_base(2, 2, State3),
+ ok.
+-endif.