Diffstat:
-rw-r--r--  .github/workflows/ci.yaml                      70
-rw-r--r--  LICENSE                                         2
-rw-r--r--  Makefile                                       27
-rw-r--r--  doc/src/manual/cow_cookie.asciidoc              6
-rw-r--r--  doc/src/manual/cow_cookie.setcookie.asciidoc    1
-rw-r--r--  ebin/cowlib.app                                 5
-rw-r--r--  erlang.mk                                    1607
-rw-r--r--  include/cow_inline.hrl                          2
-rw-r--r--  include/cow_parse.hrl                           2
-rw-r--r--  src/cow_base64url.erl                           2
-rw-r--r--  src/cow_cookie.erl                             98
-rw-r--r--  src/cow_date.erl                                2
-rw-r--r--  src/cow_hpack.erl                             362
-rw-r--r--  src/cow_hpack_common.hrl                      376
-rw-r--r--  src/cow_hpack_dec_huffman_lookup.hrl            2
-rw-r--r--  src/cow_http.erl                              683
-rw-r--r--  src/cow_http1.erl                             421
-rw-r--r--  src/cow_http2.erl                              19
-rw-r--r--  src/cow_http2_machine.erl                     458
-rw-r--r--  src/cow_http3.erl                             458
-rw-r--r--  src/cow_http3_machine.erl                     721
-rw-r--r--  src/cow_http_hd.erl                           108
-rw-r--r--  src/cow_http_struct_hd.erl                    336
-rw-r--r--  src/cow_http_te.erl                             2
-rw-r--r--  src/cow_iolists.erl                             2
-rw-r--r--  src/cow_link.erl                                2
-rw-r--r--  src/cow_mimetypes.erl                           2
-rw-r--r--  src/cow_mimetypes.erl.src                       2
-rw-r--r--  src/cow_multipart.erl                           2
-rw-r--r--  src/cow_qpack.erl                            1581
-rw-r--r--  src/cow_qs.erl                                  2
-rw-r--r--  src/cow_spdy.erl                                2
-rw-r--r--  src/cow_sse.erl                                 7
-rw-r--r--  src/cow_uri.erl                                 2
-rw-r--r--  src/cow_uri_template.erl                        6
-rw-r--r--  src/cow_ws.erl                                  2
36 files changed, 5120 insertions, 2262 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..57a2cb1
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,70 @@
+## Use workflows from ninenines/ci.erlang.mk to test Cowlib.
+
+name: Check Cowlib
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ schedule:
+ ## Every Monday at 2am.
+ - cron: 0 2 * * 1
+
+env:
+ CI_ERLANG_MK: 1
+
+jobs:
+ cleanup-master:
+ name: Cleanup master build
+ runs-on: ubuntu-latest
+ steps:
+
+ - name: Cleanup master build if necessary
+ if: ${{ github.event_name == 'schedule' }}
+ run: |
+ gh extension install actions/gh-actions-cache
+ gh actions-cache delete Linux-X64-Erlang-master -R $REPO --confirm || true
+ gh actions-cache delete macOS-X64-Erlang-master -R $REPO --confirm || true
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ REPO: ${{ github.repository }}
+
+ check:
+ name: Cowlib
+ needs: cleanup-master
+ uses: ninenines/ci.erlang.mk/.github/workflows/ci.yaml@master
+
+# The perfs tests are nice to run but typically not
+# important. So we run them after we are done with the other
+# test suites. At this point we know that Erlang was built
+# so we can just use the latest version.
+
+ perfs:
+ name: Run performance tests
+ needs: check
+ runs-on: 'ubuntu-latest'
+ if: ${{ !cancelled() }}
+ steps:
+
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Output latest Erlang/OTP version
+ id: latest_version
+ run: |
+ {
+ echo "latest<<EOF"
+ make ci-list | grep -v rc | grep -v master | tail -n1
+ echo EOF
+ } >> "$GITHUB_OUTPUT"
+
+ - name: Restore CI cache
+ uses: actions/cache/restore@v4
+ with:
+ path: |
+ ~/erlang/
+ key: ${{ runner.os }}-${{ runner.arch }}-Erlang-${{ steps.latest_version.outputs.latest }}
+
+ - name: Run perfs
+ run: make perfs LATEST_ERLANG_OTP=1
diff --git a/LICENSE b/LICENSE
index 0b60cff..fac238f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2013-2020, Loïc Hoguin <[email protected]>
+Copyright (c) 2013-2024, Loïc Hoguin <[email protected]>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
diff --git a/Makefile b/Makefile
index e50baec..b63c221 100644
--- a/Makefile
+++ b/Makefile
@@ -2,16 +2,11 @@
PROJECT = cowlib
PROJECT_DESCRIPTION = Support library for manipulating Web protocols.
-PROJECT_VERSION = 2.10.1
+PROJECT_VERSION = 2.13.0
# Options.
#ERLC_OPTS += +bin_opt_info
-ifdef HIPE
- ERLC_OPTS += -smp +native
- TEST_ERLC_OPTS += -smp +native
-endif
-
DIALYZER_OPTS = -Werror_handling -Wunmatched_returns
# Dependencies.
@@ -21,11 +16,12 @@ LOCAL_DEPS = crypto
DOC_DEPS = asciideck
TEST_DEPS = $(if $(CI_ERLANG_MK),ci.erlang.mk) base32 horse proper jsx \
- structured-header-tests uritemplate-tests
-dep_base32 = git https://github.com/dnsimple/base32_erlang master
+ decimal structured-header-tests uritemplate-tests
+dep_base32 = git https://github.com/dnsimple/base32_erlang main
dep_horse = git https://github.com/ninenines/horse.git master
dep_jsx = git https://github.com/talentdeficit/jsx v2.10.0
-dep_structured-header-tests = git https://github.com/httpwg/structured-header-tests e614583397e7f65e0082c0fff3929f32a298b9f2
+dep_decimal = git https://github.com/egobrain/decimal 0.6.2
+dep_structured-header-tests = git https://github.com/httpwg/structured-header-tests faed1f92942abd4fb5d61b1f9f0dc359f499f1d7
dep_uritemplate-tests = git https://github.com/uri-templates/uritemplate-test master
# CI configuration.
@@ -33,10 +29,8 @@ dep_uritemplate-tests = git https://github.com/uri-templates/uritemplate-test master
dep_ci.erlang.mk = git https://github.com/ninenines/ci.erlang.mk master
DEP_EARLY_PLUGINS = ci.erlang.mk
-AUTO_CI_OTP ?= OTP-21+
-AUTO_CI_HIPE ?= OTP-LATEST
-# AUTO_CI_ERLLVM ?= OTP-LATEST
-AUTO_CI_WINDOWS ?= OTP-21+
+AUTO_CI_OTP ?= OTP-LATEST-24+
+AUTO_CI_WINDOWS ?= OTP-LATEST-24+
# Hex configuration.
@@ -44,7 +38,7 @@ define HEX_TARBALL_EXTRA_METADATA
#{
licenses => [<<"ISC">>],
links => #{
- <<"Function reference">> => <<"https://ninenines.eu/docs/en/cowlib/2.10/manual/">>,
+ <<"Function reference">> => <<"https://ninenines.eu/docs/en/cowlib/2.13/manual/">>,
<<"GitHub">> => <<"https://github.com/ninenines/cowlib">>,
<<"Sponsor">> => <<"https://github.com/sponsors/essen">>
}
@@ -55,6 +49,11 @@ endef
include erlang.mk
+# Always rebuild from scratch in CI because OTP-25.0+ can't use the older build.
+
+ci-setup:: distclean-deps
+ -$(verbose) rm -rf $(ERLANG_MK_TMP)/rebar
+
# Compile options.
TEST_ERLC_OPTS += +'{parse_transform, eunit_autoexport}' +'{parse_transform, horse_autoexport}'
diff --git a/doc/src/manual/cow_cookie.asciidoc b/doc/src/manual/cow_cookie.asciidoc
index 0bde0ed..035342d 100644
--- a/doc/src/manual/cow_cookie.asciidoc
+++ b/doc/src/manual/cow_cookie.asciidoc
@@ -29,7 +29,7 @@ cookie_attrs() :: #{
path => binary(),
secure => true,
http_only => true,
- same_site => strict | lax | none
+ same_site => default | none | strict | lax
}
----
@@ -48,7 +48,7 @@ cookie_opts() :: #{
http_only => boolean(),
max_age => non_neg_integer(),
path => binary(),
- same_site => strict | lax | none,
+ same_site => default | none | strict | lax,
secure => boolean()
}
----
@@ -101,6 +101,8 @@ transfer. By default there are no restrictions.
== Changelog
+* *2.12*: The `same_site` attribute and option may now be
+ set to `default`.
* *2.10*: The `same_site` attribute and option may now be
set to `none`.
* *2.9*: The `cookie_attrs` type was added.
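
A minimal usage sketch for the parsing side, based on the `cookie_attrs()` type documented above (the cookie value and attribute set are illustrative, not taken from the patch):

----
%% Parse a set-cookie header value; cow_cookie matches
%% attribute names case-insensitively.
{ok, Name, Value, Attrs} = cow_cookie:parse_set_cookie(
    <<"sessionid=abc123; Path=/; Secure; HttpOnly; SameSite=Strict">>),
%% Name  =:= <<"sessionid">>
%% Value =:= <<"abc123">>
%% Attrs =:= #{path => <<"/">>, secure => true,
%%             http_only => true, same_site => strict}
----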
diff --git a/doc/src/manual/cow_cookie.setcookie.asciidoc b/doc/src/manual/cow_cookie.setcookie.asciidoc
index d600a07..77f98b1 100644
--- a/doc/src/manual/cow_cookie.setcookie.asciidoc
+++ b/doc/src/manual/cow_cookie.setcookie.asciidoc
@@ -36,6 +36,7 @@ An iolist with the generated set-cookie header value.
== Changelog
+* *2.12*: The `Version` attribute is no longer generated.
* *1.0*: Function introduced.
== Examples
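
For the generation side, a minimal sketch of `cow_cookie:setcookie/3` using the `cookie_opts()` documented in cow_cookie(3); per the 2.12 changelog entry above, the output no longer carries a `Version` attribute:

----
%% Build a set-cookie header value as an iolist.
SetCookie = cow_cookie:setcookie(<<"sessionid">>, <<"abc123">>,
    #{path => <<"/">>, secure => true, http_only => true,
      same_site => strict}),
%% The result is an iolist suitable for use as the value
%% of a set-cookie response header.
----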
diff --git a/ebin/cowlib.app b/ebin/cowlib.app
index d089998..47e4ced 100644
--- a/ebin/cowlib.app
+++ b/ebin/cowlib.app
@@ -1,8 +1,9 @@
{application, 'cowlib', [
{description, "Support library for manipulating Web protocols."},
- {vsn, "2.10.1"},
- {modules, ['cow_base64url','cow_cookie','cow_date','cow_hpack','cow_http','cow_http2','cow_http2_machine','cow_http_hd','cow_http_struct_hd','cow_http_te','cow_iolists','cow_link','cow_mimetypes','cow_multipart','cow_qs','cow_spdy','cow_sse','cow_uri','cow_uri_template','cow_ws']},
+ {vsn, "2.13.0"},
+ {modules, ['cow_base64url','cow_cookie','cow_date','cow_hpack','cow_http','cow_http1','cow_http2','cow_http2_machine','cow_http3','cow_http3_machine','cow_http_hd','cow_http_struct_hd','cow_http_te','cow_iolists','cow_link','cow_mimetypes','cow_multipart','cow_qpack','cow_qs','cow_spdy','cow_sse','cow_uri','cow_uri_template','cow_ws']},
{registered, []},
{applications, [kernel,stdlib,crypto]},
+ {optional_applications, []},
{env, []}
]}.
\ No newline at end of file
diff --git a/erlang.mk b/erlang.mk
index 9174521..0c1b3ac 100644
--- a/erlang.mk
+++ b/erlang.mk
@@ -17,7 +17,7 @@
ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
export ERLANG_MK_FILENAME
-ERLANG_MK_VERSION = 2020.03.05-27-g7f608c6-dirty
+ERLANG_MK_VERSION = 2022.05.31-67-g61f58ff-dirty
ERLANG_MK_WITHOUT =
# Make 3.81 and 3.82 are deprecated.
@@ -171,7 +171,7 @@ endef
# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
+$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar3/_build/prod/lib/*/ebin/ -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
endef
ifeq ($(PLATFORM),msys2)
@@ -184,8 +184,8 @@ core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$
core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
+# We skip files that contain spaces or '#' because they end up causing issues.
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) -not -name "*[ \#]*"))
core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
@@ -252,15 +252,6 @@ $(KERL_INSTALL_DIR)/$(1): $(KERL)
fi
endef
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
$(KERL): $(KERL_DIR)
$(KERL_DIR): | $(ERLANG_MK_TMP)
@@ -283,10 +274,10 @@ ERLANG_OTP := $(notdir $(lastword $(sort\
endif
ERLANG_OTP ?=
-ERLANG_HIPE ?=
# Use kerl to enforce a specific Erlang/OTP version for a project.
ifneq ($(strip $(ERLANG_OTP)),)
+
export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
SHELL := env PATH=$(PATH) $(SHELL)
$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
@@ -297,20 +288,6 @@ $(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
endif
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
endif
PACKAGES += aberth
@@ -329,22 +306,6 @@ pkg_active_fetch = git
pkg_active_repo = https://github.com/proger/active
pkg_active_commit = master
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
PACKAGES += aleppo
pkg_aleppo_name = aleppo
pkg_aleppo_description = Alternative Erlang Pre-Processor
@@ -361,14 +322,6 @@ pkg_alog_fetch = git
pkg_alog_repo = https://github.com/siberian-fast-food/alogger
pkg_alog_commit = master
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
PACKAGES += annotations
pkg_annotations_name = annotations
pkg_annotations_description = Simple code instrumentation utilities
@@ -377,14 +330,6 @@ pkg_annotations_fetch = git
pkg_annotations_repo = https://github.com/hyperthunk/annotations
pkg_annotations_commit = master
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
PACKAGES += apns
pkg_apns_name = apns
pkg_apns_description = Apple Push Notification Server for Erlang
@@ -401,14 +346,6 @@ pkg_asciideck_fetch = git
pkg_asciideck_repo = https://github.com/ninenines/asciideck
pkg_asciideck_commit = master
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
PACKAGES += backoff
pkg_backoff_name = backoff
pkg_backoff_description = Simple exponential backoffs in Erlang
@@ -449,14 +386,6 @@ pkg_beam_fetch = git
pkg_beam_repo = https://github.com/tonyrog/beam
pkg_beam_commit = master
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
PACKAGES += bear
pkg_bear_name = bear
pkg_bear_description = a set of statistics functions for erlang
@@ -505,14 +434,6 @@ pkg_bitcask_fetch = git
pkg_bitcask_repo = https://github.com/basho/bitcask
pkg_bitcask_commit = develop
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
PACKAGES += bootstrap
pkg_bootstrap_name = bootstrap
pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
@@ -577,14 +498,6 @@ pkg_cake_fetch = git
pkg_cake_repo = https://github.com/darach/cake-erl
pkg_cake_commit = master
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
PACKAGES += cberl
pkg_cberl_name = cberl
pkg_cberl_description = NIF based Erlang bindings for Couchbase
@@ -627,7 +540,7 @@ pkg_check_node_commit = master
PACKAGES += chronos
pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
pkg_chronos_homepage = https://github.com/lehoff/chronos
pkg_chronos_fetch = git
pkg_chronos_repo = https://github.com/lehoff/chronos
@@ -673,54 +586,6 @@ pkg_cloudi_service_api_requests_fetch = git
pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
pkg_cloudi_service_api_requests_commit = master
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
PACKAGES += cloudi_service_db_mysql
pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
@@ -737,22 +602,6 @@ pkg_cloudi_service_db_pgsql_fetch = git
pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
pkg_cloudi_service_db_pgsql_commit = master
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
PACKAGES += cloudi_service_filesystem
pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
@@ -833,14 +682,6 @@ pkg_cloudi_service_tcp_fetch = git
pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
pkg_cloudi_service_tcp_commit = master
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
PACKAGES += cloudi_service_udp
pkg_cloudi_service_udp_name = cloudi_service_udp
pkg_cloudi_service_udp_description = UDP CloudI Service
@@ -955,10 +796,10 @@ pkg_cr_commit = master
PACKAGES += cuttlefish
pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_description = cuttlefish configuration abstraction
+pkg_cuttlefish_homepage = https://github.com/Kyorai/cuttlefish
pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_repo = https://github.com/Kyorai/cuttlefish
pkg_cuttlefish_commit = master
PACKAGES += damocles
@@ -980,9 +821,9 @@ pkg_debbie_commit = master
PACKAGES += decimal
pkg_decimal_name = decimal
pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_homepage = https://github.com/egobrain/decimal
pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_repo = https://github.com/egobrain/decimal
pkg_decimal_commit = master
PACKAGES += detergent
@@ -993,14 +834,6 @@ pkg_detergent_fetch = git
pkg_detergent_repo = https://github.com/devinus/detergent
pkg_detergent_commit = master
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
PACKAGES += dh_date
pkg_dh_date_name = dh_date
pkg_dh_date_description = Date formatting / parsing library for erlang
@@ -1039,15 +872,7 @@ pkg_dns_description = Erlang DNS library
pkg_dns_homepage = https://github.com/aetrion/dns_erlang
pkg_dns_fetch = git
pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
+pkg_dns_commit = main
PACKAGES += dynamic_compile
pkg_dynamic_compile_name = dynamic_compile
@@ -1113,14 +938,6 @@ pkg_edgar_fetch = git
pkg_edgar_repo = https://github.com/crownedgrouse/edgar
pkg_edgar_commit = master
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
PACKAGES += edns
pkg_edns_name = edns
pkg_edns_description = Erlang/OTP DNS server
@@ -1172,10 +989,10 @@ pkg_egeoip_commit = master
PACKAGES += ehsa
pkg_ehsa_name = ehsa
pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
+pkg_ehsa_homepage = https://github.com/a12n/ehsa
+pkg_ehsa_fetch = git
+pkg_ehsa_repo = https://github.com/a12n/ehsa
+pkg_ehsa_commit = master
PACKAGES += ej
pkg_ej_name = ej
@@ -1223,7 +1040,7 @@ pkg_eleveldb_description = Erlang LevelDB API
pkg_eleveldb_homepage = https://github.com/basho/eleveldb
pkg_eleveldb_fetch = git
pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
+pkg_eleveldb_commit = develop
PACKAGES += elixir
pkg_elixir_name = elixir
@@ -1231,7 +1048,7 @@ pkg_elixir_description = Elixir is a dynamic, functional language designed for b
pkg_elixir_homepage = https://elixir-lang.org/
pkg_elixir_fetch = git
pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
+pkg_elixir_commit = main
PACKAGES += elli
pkg_elli_name = elli
@@ -1239,7 +1056,7 @@ pkg_elli_description = Simple, robust and performant Erlang web server
pkg_elli_homepage = https://github.com/elli-lib/elli
pkg_elli_fetch = git
pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
+pkg_elli_commit = main
PACKAGES += elvis
pkg_elvis_name = elvis
@@ -1257,14 +1074,6 @@ pkg_emagick_fetch = git
pkg_emagick_repo = https://github.com/kivra/emagick
pkg_emagick_commit = master
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
PACKAGES += enm
pkg_enm_name = enm
pkg_enm_description = Erlang driver for nanomsg
@@ -1353,14 +1162,6 @@ pkg_eredis_fetch = git
pkg_eredis_repo = https://github.com/wooga/eredis
pkg_eredis_commit = master
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
PACKAGES += erl_streams
pkg_erl_streams_name = erl_streams
pkg_erl_streams_description = Streams in Erlang
@@ -1369,22 +1170,6 @@ pkg_erl_streams_fetch = git
pkg_erl_streams_repo = https://github.com/epappas/erl_streams
pkg_erl_streams_commit = master
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
PACKAGES += erlang_localtime
pkg_erlang_localtime_name = erlang_localtime
pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
@@ -1417,14 +1202,6 @@ pkg_erlastic_search_fetch = git
pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
pkg_erlastic_search_commit = master
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
PACKAGES += erlbrake
pkg_erlbrake_name = erlbrake
pkg_erlbrake_description = Erlang Airbrake notification client
@@ -1471,7 +1248,7 @@ pkg_erldns_description = DNS server, in erlang.
pkg_erldns_homepage = https://github.com/aetrion/erl-dns
pkg_erldns_fetch = git
pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
+pkg_erldns_commit = main
PACKAGES += erldocker
pkg_erldocker_name = erldocker
@@ -1537,14 +1314,6 @@ pkg_erlpass_fetch = git
pkg_erlpass_repo = https://github.com/ferd/erlpass
pkg_erlpass_commit = master
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
PACKAGES += erlsh
pkg_erlsh_name = erlsh
pkg_erlsh_description = Erlang shell tools
@@ -1617,14 +1386,6 @@ pkg_erserve_fetch = git
pkg_erserve_repo = https://github.com/del/erserve
pkg_erserve_commit = master
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
PACKAGES += escalus
pkg_escalus_name = escalus
pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
@@ -1753,14 +1514,6 @@ pkg_exs1024_fetch = git
pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
pkg_exs1024_commit = master
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
PACKAGES += exsplus116
pkg_exsplus116_name = exsplus116
pkg_exsplus116_description = Xorshift116plus for Erlang
@@ -1769,22 +1522,6 @@ pkg_exsplus116_fetch = git
pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
pkg_exsplus116_commit = master
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
PACKAGES += ezmtp
pkg_ezmtp_name = ezmtp
pkg_ezmtp_description = ZMTP protocol in pure Erlang.
@@ -1857,14 +1594,6 @@ pkg_folsom_cowboy_fetch = git
pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
pkg_folsom_cowboy_commit = master
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
PACKAGES += fs
pkg_fs_name = fs
pkg_fs_description = Erlang FileSystem Listener
@@ -2041,14 +1770,6 @@ pkg_gitty_fetch = git
pkg_gitty_repo = https://github.com/maxlapshin/gitty
pkg_gitty_commit = master
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
PACKAGES += gpb
pkg_gpb_name = gpb
pkg_gpb_description = A Google Protobuf implementation for Erlang
@@ -2097,14 +1818,6 @@ pkg_gun_fetch = git
pkg_gun_repo = https://github.com/ninenines/gun
pkg_gun_commit = master
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
PACKAGES += hackney
pkg_hackney_name = hackney
pkg_hackney_description = simple HTTP client in Erlang
@@ -2121,14 +1834,6 @@ pkg_hamcrest_fetch = git
pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
pkg_hamcrest_commit = master
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
PACKAGES += hottub
pkg_hottub_name = hottub
pkg_hottub_description = Permanent Erlang Worker Pool
@@ -2177,22 +1882,6 @@ pkg_idna_fetch = git
pkg_idna_repo = https://github.com/benoitc/erlang-idna
pkg_idna_commit = master
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
PACKAGES += irc_lib
pkg_irc_lib_name = irc_lib
pkg_irc_lib_description = Erlang irc client library
@@ -2233,14 +1922,6 @@ pkg_jamdb_sybase_fetch = git
pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
pkg_jamdb_sybase_commit = master
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
PACKAGES += jesse
pkg_jesse_name = jesse
pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
@@ -2267,10 +1948,10 @@ pkg_jiffy_v_commit = master
PACKAGES += jobs
pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_description = Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/uwiger/jobs
pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_repo = https://github.com/uwiger/jobs
pkg_jobs_commit = master
PACKAGES += joxa
@@ -2281,14 +1962,6 @@ pkg_joxa_fetch = git
pkg_joxa_repo = https://github.com/joxa/joxa
pkg_joxa_commit = master
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
PACKAGES += json_rec
pkg_json_rec_name = json_rec
pkg_json_rec_description = JSON to erlang record
@@ -2305,14 +1978,6 @@ pkg_jsone_fetch = git
pkg_jsone_repo = https://github.com/sile/jsone.git
pkg_jsone_commit = master
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
PACKAGES += jsonpath
pkg_jsonpath_name = jsonpath
pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
@@ -2337,20 +2002,12 @@ pkg_jsx_fetch = git
pkg_jsx_repo = https://github.com/talentdeficit/jsx
pkg_jsx_commit = main
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
PACKAGES += kafka_protocol
pkg_kafka_protocol_name = kafka_protocol
pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_homepage = https://github.com/kafka4beam/kafka_protocol
pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_repo = https://github.com/kafka4beam/kafka_protocol
pkg_kafka_protocol_commit = master
PACKAGES += kai
@@ -2369,14 +2026,6 @@ pkg_katja_fetch = git
pkg_katja_repo = https://github.com/nifoc/katja
pkg_katja_commit = master
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
PACKAGES += key2value
pkg_key2value_name = key2value
pkg_key2value_description = Erlang 2-way map
@@ -2399,7 +2048,7 @@ pkg_kinetic_description = Erlang Kinesis Client
pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
pkg_kinetic_fetch = git
pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
+pkg_kinetic_commit = main
PACKAGES += kjell
pkg_kjell_name = kjell
@@ -2457,14 +2106,6 @@ pkg_lager_fetch = git
pkg_lager_repo = https://github.com/erlang-lager/lager
pkg_lager_commit = master
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
PACKAGES += lager_syslog
pkg_lager_syslog_name = lager_syslog
pkg_lager_syslog_description = Syslog backend for lager
@@ -2473,22 +2114,6 @@ pkg_lager_syslog_fetch = git
pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
pkg_lager_syslog_commit = master
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
PACKAGES += lasse
pkg_lasse_name = lasse
pkg_lasse_description = SSE handler for Cowboy
@@ -2505,14 +2130,6 @@ pkg_ldap_fetch = git
pkg_ldap_repo = https://github.com/spawnproc/ldap
pkg_ldap_commit = master
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
PACKAGES += lfe
pkg_lfe_name = lfe
pkg_lfe_description = Lisp Flavoured Erlang (LFE)
@@ -2521,14 +2138,6 @@ pkg_lfe_fetch = git
pkg_lfe_repo = https://github.com/rvirding/lfe
pkg_lfe_commit = master
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
PACKAGES += live
pkg_live_name = live
pkg_live_description = Automated module and configuration reloader.
@@ -2537,14 +2146,6 @@ pkg_live_fetch = git
pkg_live_repo = https://github.com/ninenines/live
pkg_live_commit = master
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
PACKAGES += locker
pkg_locker_name = locker
pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
@@ -2593,14 +2194,6 @@ pkg_luerl_fetch = git
pkg_luerl_repo = https://github.com/rvirding/luerl
pkg_luerl_commit = develop
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
PACKAGES += lux
pkg_lux_name = lux
pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
@@ -2609,14 +2202,6 @@ pkg_lux_fetch = git
pkg_lux_repo = https://github.com/hawk/lux
pkg_lux_commit = master
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
PACKAGES += mad
pkg_mad_name = mad
pkg_mad_description = Small and Fast Rebar Replacement
@@ -2641,30 +2226,6 @@ pkg_mavg_fetch = git
pkg_mavg_repo = https://github.com/EchoTeam/mavg
pkg_mavg_commit = master
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
PACKAGES += meck
pkg_meck_name = meck
pkg_meck_description = A mocking library for Erlang
@@ -2681,22 +2242,6 @@ pkg_mekao_fetch = git
pkg_mekao_repo = https://github.com/ddosia/mekao
pkg_mekao_commit = master
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
PACKAGES += merl
pkg_merl_name = merl
pkg_merl_description = Metaprogramming in Erlang
@@ -2727,7 +2272,7 @@ pkg_mixer_description = Mix in functions from other modules
pkg_mixer_homepage = https://github.com/chef/mixer
pkg_mixer_fetch = git
pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
+pkg_mixer_commit = main
PACKAGES += mochiweb
pkg_mochiweb_name = mochiweb
@@ -2735,7 +2280,7 @@ pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweigh
pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
pkg_mochiweb_fetch = git
pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
+pkg_mochiweb_commit = main
PACKAGES += mochiweb_xpath
pkg_mochiweb_xpath_name = mochiweb_xpath
@@ -2823,7 +2368,7 @@ pkg_mysql_description = MySQL client library for Erlang/OTP
pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
pkg_mysql_fetch = git
pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
+pkg_mysql_commit = 1.7.0
PACKAGES += n2o
pkg_n2o_name = n2o
@@ -2857,14 +2402,6 @@ pkg_neotoma_fetch = git
pkg_neotoma_repo = https://github.com/seancribbs/neotoma
pkg_neotoma_commit = master
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
PACKAGES += nifty
pkg_nifty_name = nifty
pkg_nifty_description = Erlang NIF wrapper generator
@@ -2881,22 +2418,6 @@ pkg_nitrogen_core_fetch = git
pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
pkg_nitrogen_core_commit = master
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
PACKAGES += nkpacket
pkg_nkpacket_name = nkpacket
pkg_nkpacket_description = Generic Erlang transport layer
@@ -2935,7 +2456,7 @@ pkg_oauth_description = An Erlang OAuth 1.0 implementation
pkg_oauth_homepage = https://github.com/tim/erlang-oauth
pkg_oauth_fetch = git
pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
+pkg_oauth_commit = main
PACKAGES += oauth2
pkg_oauth2_name = oauth2
@@ -2961,22 +2482,6 @@ pkg_octopus_fetch = git
pkg_octopus_repo = https://github.com/erlangbureau/octopus
pkg_octopus_commit = master
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = A embeddable document oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
PACKAGES += openflow
pkg_openflow_name = openflow
pkg_openflow_description = An OpenFlow controller written in pure erlang
@@ -3063,7 +2568,7 @@ pkg_pgo_description = Erlang Postgres client and connection pool
pkg_pgo_homepage = https://github.com/erleans/pgo.git
pkg_pgo_fetch = git
pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
+pkg_pgo_commit = main
PACKAGES += pgsql
pkg_pgsql_name = pgsql
@@ -3097,14 +2602,6 @@ pkg_plain_fsm_fetch = git
pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
pkg_plain_fsm_commit = master
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
PACKAGES += pmod_transform
pkg_pmod_transform_name = pmod_transform
pkg_pmod_transform_description = Parse transform for parameterized modules
@@ -3217,14 +2714,6 @@ pkg_purity_fetch = git
pkg_purity_repo = https://github.com/mpitid/purity
pkg_purity_commit = master
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
PACKAGES += qdate
pkg_qdate_name = qdate
pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
@@ -3257,14 +2746,6 @@ pkg_quickrand_fetch = git
pkg_quickrand_repo = https://github.com/okeuday/quickrand
pkg_quickrand_commit = master
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
PACKAGES += rabbit_exchange_type_riak
pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
@@ -3289,14 +2770,6 @@ pkg_radierl_fetch = git
pkg_radierl_repo = https://github.com/vances/radierl
pkg_radierl_commit = master
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
PACKAGES += ranch
pkg_ranch_name = ranch
pkg_ranch_description = Socket acceptor pool for TCP protocols.
@@ -3313,13 +2786,13 @@ pkg_rbeacon_fetch = git
pkg_rbeacon_repo = https://github.com/refuge/rbeacon
pkg_rbeacon_commit = master
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
+PACKAGES += re2
+pkg_re2_name = re2
+pkg_re2_description = Erlang NIF bindings for RE2 regex library
+pkg_re2_homepage = https://github.com/dukesoferl/re2
+pkg_re2_fetch = git
+pkg_re2_repo = https://github.com/dukesoferl/re2
+pkg_re2_commit = master
PACKAGES += rebus
pkg_rebus_name = rebus
@@ -3391,7 +2864,7 @@ pkg_relx_description = Sane, simple release creation for Erlang
pkg_relx_homepage = https://github.com/erlware/relx
pkg_relx_fetch = git
pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
+pkg_relx_commit = main
PACKAGES += resource_discovery
pkg_resource_discovery_name = resource_discovery
@@ -3417,21 +2890,13 @@ pkg_rfc4627_jsonrpc_fetch = git
pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
pkg_rfc4627_jsonrpc_commit = master
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
PACKAGES += riak_core
pkg_riak_core_name = riak_core
pkg_riak_core_description = Distributed systems infrastructure used by Riak.
pkg_riak_core_homepage = https://github.com/basho/riak_core
pkg_riak_core_fetch = git
pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
+pkg_riak_core_commit = develop
PACKAGES += riak_dt
pkg_riak_dt_name = riak_dt
@@ -3447,7 +2912,7 @@ pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
pkg_riak_ensemble_fetch = git
pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
+pkg_riak_ensemble_commit = develop
PACKAGES += riak_kv
pkg_riak_kv_name = riak_kv
@@ -3455,15 +2920,7 @@ pkg_riak_kv_description = Riak Key/Value Store
pkg_riak_kv_homepage = https://github.com/basho/riak_kv
pkg_riak_kv_fetch = git
pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
+pkg_riak_kv_commit = develop
PACKAGES += riak_pipe
pkg_riak_pipe_name = riak_pipe
@@ -3471,7 +2928,7 @@ pkg_riak_pipe_description = Riak Pipelines
pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
pkg_riak_pipe_fetch = git
pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
+pkg_riak_pipe_commit = develop
PACKAGES += riak_sysmon
pkg_riak_sysmon_name = riak_sysmon
@@ -3481,14 +2938,6 @@ pkg_riak_sysmon_fetch = git
pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
pkg_riak_sysmon_commit = master
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
PACKAGES += riakc
pkg_riakc_name = riakc
pkg_riakc_description = Erlang clients for Riak.
@@ -3497,38 +2946,6 @@ pkg_riakc_fetch = git
pkg_riakc_repo = https://github.com/basho/riak-erlang-client
pkg_riakc_commit = master
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
PACKAGES += rlimit
pkg_rlimit_name = rlimit
pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
@@ -3561,14 +2978,6 @@ pkg_seestar_fetch = git
pkg_seestar_repo = https://github.com/iamaleksey/seestar
pkg_seestar_commit = master
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
PACKAGES += setup
pkg_setup_name = setup
pkg_setup_description = Generic setup utility for Erlang-based systems
@@ -3623,7 +3032,7 @@ pkg_sidejob_description = Parallel worker and capacity limiting library for Erla
pkg_sidejob_homepage = https://github.com/basho/sidejob
pkg_sidejob_fetch = git
pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
+pkg_sidejob_commit = develop
PACKAGES += sieve
pkg_sieve_name = sieve
@@ -3633,14 +3042,6 @@ pkg_sieve_fetch = git
pkg_sieve_repo = https://github.com/benoitc/sieve
pkg_sieve_commit = master
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
PACKAGES += simhash
pkg_simhash_name = simhash
pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
@@ -3681,14 +3082,6 @@ pkg_slack_fetch = git
pkg_slack_repo = https://github.com/DonBranson/slack.git
pkg_slack_commit = master
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
PACKAGES += snappyer
pkg_snappyer_name = snappyer
pkg_snappyer_description = Snappy as nif for Erlang
@@ -3705,14 +3098,6 @@ pkg_social_fetch = git
pkg_social_repo = https://github.com/dvv/social
pkg_social_commit = master
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
PACKAGES += sqerl
pkg_sqerl_name = sqerl
pkg_sqerl_description = An Erlang-flavoured SQL DSL
@@ -3753,14 +3138,6 @@ pkg_statebox_fetch = git
pkg_statebox_repo = https://github.com/mochi/statebox
pkg_statebox_commit = master
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
PACKAGES += statman
pkg_statman_name = statman
pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
@@ -3793,14 +3170,6 @@ pkg_stockdb_fetch = git
pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
pkg_stockdb_commit = master
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
PACKAGES += subproc
pkg_subproc_name = subproc
pkg_subproc_description = unix subprocess manager with {active,once|false} modes
@@ -3817,14 +3186,6 @@ pkg_supervisor3_fetch = git
pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
pkg_supervisor3_commit = master
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
PACKAGES += swab
pkg_swab_name = swab
pkg_swab_description = General purpose buffer handling module
@@ -3905,14 +3266,6 @@ pkg_tempo_fetch = git
pkg_tempo_repo = https://github.com/selectel/tempo
pkg_tempo_commit = master
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
PACKAGES += tinymq
pkg_tinymq_name = tinymq
pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
@@ -3969,14 +3322,6 @@ pkg_trane_fetch = git
pkg_trane_repo = https://github.com/massemanet/trane
pkg_trane_commit = master
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
PACKAGES += trie
pkg_trie_name = trie
pkg_trie_description = Erlang Trie Implementation
@@ -4001,30 +3346,6 @@ pkg_tunctl_fetch = git
pkg_tunctl_repo = https://github.com/msantos/tunctl
pkg_tunctl_commit = master
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
PACKAGES += unicorn
pkg_unicorn_name = unicorn
pkg_unicorn_description = Generic configuration server
@@ -4057,14 +3378,6 @@ pkg_ux_fetch = git
pkg_ux_repo = https://github.com/erlang-unicode/ux
pkg_ux_commit = master
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
PACKAGES += verx
pkg_verx_name = verx
pkg_verx_description = Erlang implementation of the libvirtd remote protocol
@@ -4073,14 +3386,6 @@ pkg_verx_fetch = git
pkg_verx_repo = https://github.com/msantos/verx
pkg_verx_commit = master
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
PACKAGES += vmq_bridge
pkg_vmq_bridge_name = vmq_bridge
pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
@@ -4089,46 +3394,6 @@ pkg_vmq_bridge_fetch = git
pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
pkg_vmq_bridge_commit = master
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
PACKAGES += vmstats
pkg_vmstats_name = vmstats
pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
@@ -4167,7 +3432,7 @@ pkg_worker_pool_description = a simple erlang worker pool
pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
pkg_worker_pool_fetch = git
pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
+pkg_worker_pool_commit = main
PACKAGES += wrangler
pkg_wrangler_name = wrangler
@@ -4225,30 +3490,6 @@ pkg_yaws_fetch = git
pkg_yaws_repo = https://github.com/klacke/yaws
pkg_yaws_commit = master
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
PACKAGES += zippers
pkg_zippers_name = zippers
pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
@@ -4265,14 +3506,6 @@ pkg_zlists_fetch = git
pkg_zlists_repo = https://github.com/vjache/erlang-zlists
pkg_zlists_commit = master
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
PACKAGES += zucchini
pkg_zucchini_name = zucchini
pkg_zucchini_description = An Erlang INI parser
@@ -4331,19 +3564,13 @@ export DEPS_DIR
REBAR_DEPS_DIR = $(DEPS_DIR)
export REBAR_DEPS_DIR
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
+REBAR3_GIT ?= https://github.com/erlang/rebar3
+REBAR3_COMMIT ?= 06aaecd51b0ce828b66bb65a74d3c1fd7833a4ba # 3.22.1 + OTP-27 fixes
-HEX_CORE_GIT ?= https://github.com/hexpm/hex_core
-HEX_CORE_COMMIT ?= v0.7.0
+CACHE_DEPS ?= 0
-PACKAGES += hex_core
-pkg_hex_core_name = hex_core
-pkg_hex_core_description = Reference implementation of Hex specifications
-pkg_hex_core_homepage = $(HEX_CORE_GIT)
-pkg_hex_core_fetch = git
-pkg_hex_core_repo = $(HEX_CORE_GIT)
-pkg_hex_core_commit = $(HEX_CORE_COMMIT)
+CACHE_DIR ?= $(if $(XDG_CACHE_HOME),$(XDG_CACHE_HOME),$(HOME)/.cache)/erlang.mk
+export CACHE_DIR
# External "early" plugins (see core/plugins.mk for regular plugins).
# They both use the core_dep_plugin macro.
@@ -4527,6 +3754,9 @@ ifneq ($(ALL_DEPS_DIRS),)
echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
:; \
+ elif [ "$$dep" = "$(DEPS_DIR)/hut" -a "$(HUT_PATCH)" ]; then \
+ $(MAKE) -C $$dep app IS_DEP=1; \
+ if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
$(MAKE) -C $$dep IS_DEP=1; \
if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
@@ -4622,10 +3852,10 @@ define dep_autopatch_fetch_rebar
endef
define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
+ if [ ! -d $(ERLANG_MK_TMP)/rebar3 ]; then \
+ git clone -q -n -- $(REBAR3_GIT) $(ERLANG_MK_TMP)/rebar3; \
+ cd $(ERLANG_MK_TMP)/rebar3; \
+ git checkout -q $(REBAR3_COMMIT); \
./bootstrap; \
cd -; \
fi
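The autopatch bootstrap now builds rebar3 from REBAR3_GIT/REBAR3_COMMIT rather than the old rebar. Both variables use ?=, so a project can pin its own copy; a minimal sketch (the tag shown is only an example, matching the default's "3.22.1" comment above):

    REBAR3_GIT = https://github.com/erlang/rebar3
    REBAR3_COMMIT = 3.22.1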
@@ -4642,7 +3872,7 @@ endef
define dep_autopatch_rebar.erl
application:load(rebar),
application:set_env(rebar, log_level, debug),
- rmemo:start(),
+ {module, rebar3} = c:l(rebar3),
Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
{ok, Conf0} -> Conf0;
_ -> []
@@ -4676,7 +3906,7 @@ define dep_autopatch_rebar.erl
(V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
end,
fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+ Write("ERLC_OPTS = +debug_info\n"),
case lists:keyfind(erl_opts, 1, Conf) of
false -> ok;
{_, ErlOpts} ->
@@ -4699,12 +3929,23 @@ define dep_autopatch_rebar.erl
end,
Write("\n")
end(),
- GetHexVsn = fun(N, NP) ->
+ GetHexVsn2 = fun(N, NP) ->
case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
{ok, Lock} ->
io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
+ LockPkgs = case lists:keyfind("1.2.0", 1, Lock) of
+ {_, LP} ->
+ LP;
+ _ ->
+ case lists:keyfind("1.1.0", 1, Lock) of
+ {_, LP} ->
+ LP;
+ _ ->
+ false
+ end
+ end,
+ if
+ is_list(LockPkgs) ->
io:format("~p~n", [LockPkgs]),
case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
{_, {pkg, _, Vsn}, _} ->
@@ -4713,35 +3954,46 @@ define dep_autopatch_rebar.erl
_ ->
false
end;
- _ ->
+ true ->
false
end;
_ ->
false
end
end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
+ GetHexVsn3Common = fun(N, NP, S0) ->
+ case GetHexVsn2(N, NP) of
+ false ->
+ S2 = case S0 of
+ " " ++ S1 -> S1;
+ _ -> S0
+ end,
+ S = case length([ok || $$. <- S2]) of
+ 0 -> S2 ++ ".0.0";
+ 1 -> S2 ++ ".0";
+ _ -> S2
+ end,
+ {N, {hex, NP, S}};
+ NameSource ->
+ NameSource
+ end
+ end,
+ GetHexVsn3 = fun
+ (N, NP, "~>" ++ S0) ->
+ GetHexVsn3Common(N, NP, S0);
+ (N, NP, ">=" ++ S0) ->
+ GetHexVsn3Common(N, NP, S0);
+ (N, NP, S) -> {N, {hex, NP, S}}
end,
fun() ->
File = case lists:keyfind(deps, 1, Conf) of
false -> [];
{_, Deps} ->
[begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
+ N when is_atom(N) -> GetHexVsn2(N, N);
+ {N, S} when is_atom(N), is_list(S) -> GetHexVsn3(N, N, S);
+ {N, {pkg, NP}} when is_atom(N) -> GetHexVsn2(N, NP);
+ {N, S, {pkg, NP}} -> GetHexVsn3(N, NP, S);
{N, S} when is_tuple(S) -> {N, S};
{N, _, S} -> {N, S};
{N, _, S, _} -> {N, S};
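GetHexVsn3 only pads a rebar-style "~>" or ">=" requirement into a concrete Hex version when the rebar.lock lookup (GetHexVsn2) finds nothing pinned; missing components gain ".0" parts. Illustrative mappings, with a hypothetical package name:

    # {mydep, "~> 2"}      -> {hex, mydep, "2.0.0"}
    # {mydep, "~> 2.12"}   -> {hex, mydep, "2.12.0"}
    # {mydep, ">= 2.12.0"} -> {hex, mydep, "2.12.0"}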
@@ -4764,13 +4016,16 @@ define dep_autopatch_rebar.erl
fun() ->
case lists:keyfind(erl_first_files, 1, Conf) of
false -> ok;
- {_, Files} ->
+ {_, Files0} ->
+ Files = [begin
+ hd(filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)**/" ++ filename:rootname(F) ++ ".*rl"))
+ end || "src/" ++ F <- Files0],
Names = [[" ", case lists:reverse(F) of
"lre." ++ Elif -> lists:reverse(Elif);
"lrx." ++ Elif -> lists:reverse(Elif);
"lry." ++ Elif -> lists:reverse(Elif);
Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
+ end] || "$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ F <- Files],
Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
end
end(),
@@ -4797,6 +4052,8 @@ define dep_autopatch_rebar.erl
Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
{compile, Cmd} ->
Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+ {{pc, compile}, Cmd} ->
+ Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
{Regex, compile, Cmd} ->
case rebar_utils:is_arch(Regex) of
true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
@@ -4887,9 +4144,11 @@ define dep_autopatch_rebar.erl
[[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
Output, ": $$\(foreach ext,.c .C .cc .cpp,",
"$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+ "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(LDLIBS) $$\(EXE_LDFLAGS)",
case {filename:extension(Output), $(PLATFORM)} of
{[], _} -> "\n";
+ {".so", darwin} -> " -shared\n";
+ {".dylib", darwin} -> " -shared\n";
{_, darwin} -> "\n";
_ -> " -shared\n"
end])
@@ -4959,9 +4218,12 @@ endef
define dep_autopatch_appsrc_script.erl
AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
+ Conf1 = case file:consult(AppSrc) of
+ {ok, Conf0} -> Conf0;
+ {error, enoent} -> []
+ end,
Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
+ Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
Conf = case file:script(AppSrcScript, Bindings) of
{ok, [C]} -> C;
@@ -4991,9 +4253,39 @@ define dep_autopatch_appsrc.erl
halt()
endef
+ifeq ($(CACHE_DEPS),1)
+
+define dep_cache_fetch_git
+ mkdir -p $(CACHE_DIR)/git; \
+ if test -d "$(join $(CACHE_DIR)/git/,$(call dep_name,$1))"; then \
+ cd $(join $(CACHE_DIR)/git/,$(call dep_name,$1)); \
+ if ! git checkout -q $(call dep_commit,$1); then \
+ git remote set-url origin $(call dep_repo,$1) && \
+ git pull --all && \
+ git cat-file -e $(call dep_commit,$1) 2>/dev/null; \
+ fi; \
+ else \
+ git clone -q -n -- $(call dep_repo,$1) $(join $(CACHE_DIR)/git/,$(call dep_name,$1)); \
+ fi; \
+ git clone -q --branch $(call dep_commit,$1) --single-branch -- $(join $(CACHE_DIR)/git/,$(call dep_name,$1)) $2
+endef
+
define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+ $(call dep_cache_fetch_git,$1,$(DEPS_DIR)/$(call dep_name,$1));
+endef
+
+define dep_fetch_git-subfolder
+ mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
+ $(call dep_cache_fetch_git,$1,$(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)); \
+ ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$1)) \
+ $(DEPS_DIR)/$(call dep_name,$1);
+endef
+
+else
+
+define dep_fetch_git
+ git clone -q -n -- $(call dep_repo,$1) $(DEPS_DIR)/$(call dep_name,$1); \
+ cd $(DEPS_DIR)/$(call dep_name,$1) && git checkout -q $(call dep_commit,$1);
endef
define dep_fetch_git-subfolder
@@ -5002,10 +4294,12 @@ define dep_fetch_git-subfolder
$(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
&& git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
+ ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$1)) \
$(DEPS_DIR)/$(call dep_name,$1);
endef
+endif
+
define dep_fetch_git-submodule
git submodule update --init -- $(DEPS_DIR)/$1;
endef
@@ -5027,30 +4321,29 @@ define dep_fetch_ln
ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
endef
-# @todo Handle errors.
-define dep_fetch_hex.erl
- {ok, _} = application:ensure_all_started(ssl),
- {ok, _} = application:ensure_all_started(inets),
- Config = hex_core:default_config(),
- {ok, {200, #{}, Tarball}} = hex_repo:get_tarball(Config, <<"$(strip $3)">>, <<"$(strip $2)">>),
- {ok, #{}} = hex_tarball:unpack(Tarball, "$(DEPS_DIR)/$1"),
- halt(0)
+ifeq ($(CACHE_DEPS),1)
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+ mkdir -p $(CACHE_DIR)/hex $(DEPS_DIR)/$1; \
+ $(eval hex_tar_name=$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar) \
+ $(if $(wildcard $(CACHE_DIR)/hex/$(hex_tar_name)),,$(call core_http_get,$(CACHE_DIR)/hex/$(hex_tar_name),\
+ https://repo.hex.pm/tarballs/$(hex_tar_name);)) \
+ tar -xOf $(CACHE_DIR)/hex/$(hex_tar_name) contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
endef
+else
+
# Hex only has a package version. No need to look in the Erlang.mk packages.
define dep_fetch_hex
- if [ ! -e $(DEPS_DIR)/hex_core ]; then \
- echo "Error: Dependency hex_core missing. BUILD_DEPS += hex_core to fix." >&2; \
- exit 81; \
- fi; \
- if [ ! -e $(DEPS_DIR)/hex_core/ebin/dep_built ]; then \
- $(MAKE) -C $(DEPS_DIR)/hex_core IS_DEP=1; \
- touch $(DEPS_DIR)/hex_core/ebin/dep_built; \
- fi; \
- $(call erlang,$(call dep_fetch_hex.erl,$1,$(word 2,$(dep_$1)),\
- $(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)));
+ mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
+ $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
+ https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
+ tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
endef
+endif
+
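Both dep_fetch_hex variants derive the tarball name from the dep_* line: word 2 is the version and the optional word 3 a Hex package name that differs from the Erlang.mk name. A minimal sketch (versions and the renamed package are examples only):

    DEPS += cowlib
    dep_cowlib = hex 2.13.0
    # Fetch Hex package "jose" under the local name my_jose (hypothetical):
    dep_my_jose = hex 1.11.1 jose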
define dep_fetch_fail
echo "Error: Unknown or invalid dependency: $(1)." >&2; \
exit 78;
@@ -5089,22 +4382,7 @@ endif
.PHONY: autopatch-$(call dep_name,$1)
autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
+ $(verbose) if [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
else \
$$(call dep_autopatch,$(call dep_name,$1)) \
@@ -5136,6 +4414,16 @@ distclean-deps:
$(gen_verbose) rm -rf $(DEPS_DIR)
endif
+ifeq ($(CACHE_DEPS),1)
+cacheclean:: cacheclean-git cacheclean-hex
+
+cacheclean-git:
+ $(gen_verbose) rm -rf $(CACHE_DIR)/git
+
+cacheclean-hex:
+ $(gen_verbose) rm -rf $(CACHE_DIR)/hex
+endif
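With CACHE_DEPS=1, git dependencies are cloned from a reusable clone kept under $(CACHE_DIR)/git and Hex tarballs are kept under $(CACHE_DIR)/hex, so repeated builds avoid the network; cacheclean drops both. A sketch:

    # In the project Makefile or on the command line:
    CACHE_DEPS = 1
    # Optional; defaults to $XDG_CACHE_HOME/erlang.mk or ~/.cache/erlang.mk:
    CACHE_DIR = /tmp/erlang.mk-cache
    # Drop everything cached so far:
    #   make cacheclean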
+
# Forward-declare variables used in core/deps-tools.mk. This is required
# in case plugins use them.
@@ -5218,7 +4506,8 @@ define app_file
{id$(comma)$(space)"$(1)"}$(comma))
{modules, [$(call comma_list,$(2))]},
{registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
+ {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(OPTIONAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
+ {optional_applications, [$(call comma_list,$(OPTIONAL_DEPS))]},
{env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
]}.
endef
@@ -5230,7 +4519,8 @@ define app_file
{id$(comma)$(space)"$(1)"}$(comma))
{modules, [$(call comma_list,$(2))]},
{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
+ {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(OPTIONAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
+ {optional_applications, [$(call comma_list,$(OPTIONAL_DEPS))]},
{mod, {$(PROJECT_MOD), []}},
{env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
]}.
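Both app_file templates now append $(OPTIONAL_DEPS) to the applications list and emit them again under optional_applications, letting recent OTP releases start the application even when those dependencies are absent. A sketch (the dependency name is hypothetical):

    # Listed in the .app file as an application, but marked optional:
    OPTIONAL_DEPS = quantile_estimator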
@@ -5375,7 +4665,6 @@ define makedep.erl
end,
MakeDepend = fun
(F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
case io:parse_erl_form(Fd, undefined, StartLocation) of
{ok, AbsData, EndLocation} ->
case AbsData of
@@ -5726,8 +5015,8 @@ try
})
end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
+catch C:E$(if $V,:S) ->
+ io:format("Exception: ~p:~p~n$(if $V,Stacktrace: ~p~n)", [C, E$(if $V,$(comma) S)]),
halt(1)
end.
endef
@@ -5845,6 +5134,8 @@ endef
define bs_relx_config
{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
+{dev_mode, false}.
+{include_erts, true}.
{extended_start_script, true}.
{sys_config, "config/sys.config"}.
{vm_args, "config/vm.args"}.
@@ -6201,6 +5492,8 @@ endif
$(verbose) mkdir config/
$(verbose) $(call core_render,bs_sys_config,config/sys.config)
$(verbose) $(call core_render,bs_vm_args,config/vm.args)
+ $(verbose) awk '/^include erlang.mk/ && !ins {print "BUILD_DEPS += relx";ins=1};{print}' Makefile > Makefile.bak
+ $(verbose) mv Makefile.bak Makefile
new-app:
ifndef in
@@ -6274,17 +5567,24 @@ C_SRC_TYPE ?= shared
ifeq ($(PLATFORM),msys2)
C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+ C_SRC_OUTPUT_STATIC_EXTENSION ?= .lib
else
C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+ C_SRC_OUTPUT_STATIC_EXTENSION ?= .a
endif
ifeq ($(C_SRC_TYPE),shared)
C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else ifeq ($(C_SRC_TYPE),static)
+ C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_STATIC_EXTENSION)
else
C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
endif
+RANLIB ?= ranlib
+ARFLAGS ?= cr
+
ifeq ($(PLATFORM),msys2)
# We hardcode the compiler used on MSYS2. The default CC=cc does
# not produce working code. The "gcc" MSYS2 package also doesn't.
@@ -6294,9 +5594,9 @@ ifeq ($(PLATFORM),msys2)
CXXFLAGS ?= -O3 -finline-functions -Wall
else ifeq ($(PLATFORM),darwin)
CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+ CFLAGS ?= -O3 -std=c99 -Wall -Wmissing-prototypes
+ CXXFLAGS ?= -O3 -Wall
+ LDFLAGS ?= -flat_namespace -undefined suppress
else ifeq ($(PLATFORM),freebsd)
CC ?= cc
CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
@@ -6312,6 +5612,11 @@ ifneq ($(PLATFORM),msys2)
CXXFLAGS += -fPIC
endif
+ifeq ($(C_SRC_TYPE),static)
+ CFLAGS += -DSTATIC_ERLANG_NIF=1
+ CXXFLAGS += -DSTATIC_ERLANG_NIF=1
+endif
+
CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
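C_SRC_TYPE gains a static mode: objects are archived with $(AR) and $(RANLIB) instead of linked, the output extension becomes .a (.lib on msys2), and STATIC_ERLANG_NIF=1 is defined, the macro erl_nif.h checks when a NIF is meant to be linked statically. A sketch:

    C_SRC_TYPE = static
    # Override the archiver defaults if needed:
    ARFLAGS = cr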
@@ -6328,6 +5633,12 @@ cpp_verbose = $(cpp_verbose_$(V))
link_verbose_0 = @echo " LD " $(@F);
link_verbose = $(link_verbose_$(V))
+ar_verbose_0 = @echo " AR " $(@F);
+ar_verbose = $(ar_verbose_$(V))
+
+ranlib_verbose_0 = @echo " RANLIB" $(@F);
+ranlib_verbose = $(ranlib_verbose_$(V))
+
# Targets.
ifeq ($(wildcard $(C_SRC_DIR)),)
@@ -6356,11 +5667,19 @@ app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+ifneq ($(C_SRC_TYPE),static)
$(C_SRC_OUTPUT_FILE): $(OBJECTS)
$(verbose) mkdir -p $(dir $@)
$(link_verbose) $(CC) $(OBJECTS) \
$(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
-o $(C_SRC_OUTPUT_FILE)
+else
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+ $(verbose) mkdir -p $(dir $@)
+ $(ar_verbose) $(AR) $(ARFLAGS) $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+ $(ranlib_verbose) $(RANLIB) $(C_SRC_OUTPUT_FILE)
+endif
+
$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
@@ -6511,24 +5830,14 @@ endif
.PHONY: ci ci-prepare ci-setup
CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
+ifeq ($(strip $(CI_OTP)),)
ci::
else
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
+ci:: $(addprefix ci-,$(CI_OTP))
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
+ci-prepare: $(addprefix ci-prepare-,$(CI_OTP))
ci-setup::
$(verbose) :
@@ -6540,7 +5849,10 @@ ci_verbose_0 = @echo " CI " $(1);
ci_verbose = $(ci_verbose_$(V))
define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
+ci-prepare-$1: $(KERL_INSTALL_DIR)/$2
+ $(verbose) :
+
+ci-$1: ci-prepare-$1
$(verbose) $(MAKE) --no-print-directory clean
$(ci_verbose) \
PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
@@ -6552,11 +5864,8 @@ ci-$1: $(KERL_INSTALL_DIR)/$2
endef
$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
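The HiPE and ErLLVM CI variants are gone; ci now only iterates the kerl-built versions listed in CI_OTP, and each version also gets a ci-prepare-VERSION target. A sketch (version strings are examples):

    CI_OTP = OTP-25.3.2 OTP-26.2
    # make ci-prepare   # build and install the listed versions with kerl
    # make ci           # run the full suite once per version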
help::
$(verbose) printf "%s\n" "" \
@@ -6716,9 +6025,9 @@ endif
endif
define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+ct-$1: test-build
+ $$(verbose) mkdir -p $$(CT_LOGS_DIR)
+ $$(gen_verbose_esc) $$(CT_RUN) -sname ct_$$(PROJECT) -suite $$(addsuffix _SUITE,$1) $$(CT_EXTRA) $$(CT_OPTS)
endef
$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
@@ -6738,7 +6047,7 @@ export DIALYZER_PLT
PLT_APPS ?=
DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+DIALYZER_OPTS ?= -Werror_handling -Wunmatched_returns # -Wunderspecs
DIALYZER_PLT_OPTS ?=
# Core targets.
@@ -6797,7 +6106,7 @@ dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
else
dialyze: $(DIALYZER_PLT)
endif
- $(verbose) dialyzer --no_native `$(ERL) \
+ $(verbose) dialyzer `$(ERL) \
-eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
-extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
@@ -6814,7 +6123,11 @@ EDOC_OUTPUT ?= doc
define edoc.erl
SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
+ filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}")
+ ++ lists:filter(fun(D) ->
+ filelib:is_dir(D)
+ end, filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}/**"))
+ ++ Acc
end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
@@ -6931,11 +6244,11 @@ help::
escript-zip:: FULL=1
escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
+ $(verbose) mkdir -p $(dir $(abspath $(ESCRIPT_ZIP_FILE)))
+ $(verbose) rm -f $(abspath $(ESCRIPT_ZIP_FILE))
+ $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(abspath $(ESCRIPT_ZIP_FILE)) $(PROJECT)/ebin/*
ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
+ $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(abspath $(ESCRIPT_ZIP_FILE)) \
$(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
$(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
endif
@@ -6945,11 +6258,11 @@ escript:: escript-zip
"#!$(ESCRIPT_SHEBANG)" \
"%% $(ESCRIPT_COMMENT)" \
"%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
+ $(verbose) cat $(abspath $(ESCRIPT_ZIP_FILE)) >> $(ESCRIPT_FILE)
$(verbose) chmod +x $(ESCRIPT_FILE)
distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
+ $(gen_verbose) rm -f $(ESCRIPT_FILE) $(abspath $(ESCRIPT_ZIP_FILE))
# Copyright (c) 2015-2016, Loïc Hoguin <[email protected]>
# Copyright (c) 2014, Enrique Fernandez <[email protected]>
@@ -6961,6 +6274,7 @@ distclean-escript:
EUNIT_OPTS ?=
EUNIT_ERL_OPTS ?=
+EUNIT_TEST_SPEC ?= $1
# Core targets.
@@ -6976,7 +6290,7 @@ help::
define eunit.erl
$(call cover.erl)
CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
+ case eunit:test($(call EUNIT_TEST_SPEC,$1), [$(EUNIT_OPTS)]) of
ok -> ok;
error -> halt(2)
end,
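EUNIT_TEST_SPEC is applied with $(call ...) to the computed test set before eunit:test/2 runs it, so a project can wrap the whole set in a standard EUnit test representation. A sketch:

    # Run the generated set in parallel ({inparallel, Tests} is standard EUnit):
    EUNIT_TEST_SPEC = {inparallel, $1}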
@@ -7017,6 +6331,17 @@ endif
# Copyright (c) 2020, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
+HEX_CORE_GIT ?= https://github.com/hexpm/hex_core
+HEX_CORE_COMMIT ?= v0.7.0
+
+PACKAGES += hex_core
+pkg_hex_core_name = hex_core
+pkg_hex_core_description = Reference implementation of Hex specifications
+pkg_hex_core_homepage = $(HEX_CORE_GIT)
+pkg_hex_core_fetch = git
+pkg_hex_core_repo = $(HEX_CORE_GIT)
+pkg_hex_core_commit = $(HEX_CORE_COMMIT)
+
# We automatically depend on hex_core when the project isn't already.
$(if $(filter hex_core,$(DEPS) $(BUILD_DEPS) $(DOC_DEPS) $(REL_DEPS) $(TEST_DEPS)),,\
$(eval $(call dep_target,hex_core)))
@@ -7037,7 +6362,6 @@ define hex_config.erl
end
endef
-# @todo Something is wrong about the password I couldn't log into hex.pm.
define hex_user_create.erl
{ok, _} = application:ensure_all_started(ssl),
{ok, _} = application:ensure_all_started(inets),
@@ -7101,14 +6425,14 @@ HEX_TARBALL_FILES ?= \
$(wildcard ebin/$(PROJECT).app) \
$(wildcard ebin/$(PROJECT).appup) \
$(wildcard $(notdir $(ERLANG_MK_FILENAME))) \
- $(call core_find,include/,*.hrl) \
+ $(sort $(call core_find,include/,*.hrl)) \
$(wildcard LICENSE*) \
$(wildcard Makefile) \
$(wildcard plugins.mk) \
- $(call core_find,priv/,*) \
+ $(sort $(call core_find,priv/,*)) \
$(wildcard README*) \
$(wildcard rebar.config) \
- $(call core_find,src/,*)
+ $(sort $(call core_find,src/,*))
HEX_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT).tar
@@ -7288,6 +6612,59 @@ hex-release-unretire: hex-core
$(gen_verbose) $(call erlang,$(call hex_release_unretire.erl,$(HEX_SECRET),\
$(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION))))
+HEX_DOCS_DOC_DIR ?= doc/
+HEX_DOCS_TARBALL_FILES ?= $(sort $(call core_find,$(HEX_DOCS_DOC_DIR),*))
+HEX_DOCS_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT)-docs.tar.gz
+
+$(HEX_DOCS_TARBALL_OUTPUT_FILE): hex-core app docs
+ $(hex_tar_verbose) tar czf $(HEX_DOCS_TARBALL_OUTPUT_FILE) -C $(HEX_DOCS_DOC_DIR) \
+ $(HEX_DOCS_TARBALL_FILES:$(HEX_DOCS_DOC_DIR)%=%)
+
+hex-docs-tarball-create: $(HEX_DOCS_TARBALL_OUTPUT_FILE)
+
+define hex_docs_publish.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ {ok, Tarball} = file:read_file("$(strip $(HEX_DOCS_TARBALL_OUTPUT_FILE))"),
+ case hex_api:post(ConfigF,
+ ["packages", "$(strip $(PROJECT))", "releases", "$(strip $(PROJECT_VERSION))", "docs"],
+ {"application/octet-stream", Tarball}) of
+ {ok, {Status, _, _}} when Status >= 200, Status < 300 ->
+ io:format("Docs published~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(88)
+ end
+endef
+
+hex-docs-publish: hex-core hex-docs-tarball-create
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_docs_publish.erl,$(HEX_SECRET)))
+
+define hex_docs_delete.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api:delete(ConfigF,
+ ["packages", "$(strip $(PROJECT))", "releases", "$(strip $2)", "docs"]) of
+ {ok, {Status, _, _}} when Status >= 200, Status < 300 ->
+ io:format("Docs removed~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(89)
+ end
+endef
+
+hex-docs-delete: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_docs_delete.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION))))
+
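The new hex-docs-* targets tar the generated $(HEX_DOCS_DOC_DIR) and publish or remove it through the hex.pm API, prompting for the secret when HEX_SECRET is unset. Typical invocations (the version is an example):

    #   make hex-docs-publish
    #   make hex-docs-delete HEX_VERSION=1.2.0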
# Copyright (c) 2015-2017, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -7327,8 +6704,8 @@ define proper_check.erl
end of
true -> halt(0);
_ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
+ catch error:undef$(if $V,:Stacktrace) ->
+ io:format("Undefined property or module?~n$(if $V,~p~n)", [$(if $V,Stacktrace)]),
halt(0)
end.
endef
@@ -7393,10 +6770,13 @@ else
define compile_proto.erl
[begin
gpb_compile:file(F, [
+ $(foreach i,$(sort $(dir $(PROTO_FILES))),{i$(comma) "$i"}$(comma))
{include_as_lib, true},
{module_name_suffix, "_pb"},
{o_hrl, "./include"},
- {o_erl, "./src"}])
+ {o_erl, "./src"},
+ {use_packages, true}
+ ])
end || F <- string:tokens("$1", " ")],
halt().
endef
@@ -7413,15 +6793,14 @@ endif
# Copyright (c) 2013-2016, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
+ifeq ($(filter relx,$(BUILD_DEPS) $(DEPS) $(REL_DEPS)),relx)
.PHONY: relx-rel relx-relup distclean-relx-rel run
# Configuration.
-RELX ?= $(ERLANG_MK_TMP)/relx
RELX_CONFIG ?= $(CURDIR)/relx.config
+RELX_CONFIG_SCRIPT ?= $(CURDIR)/relx.config.script
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
RELX_OUTPUT_DIR ?= _rel
RELX_REL_EXT ?=
RELX_TAR ?= 1
@@ -7430,16 +6809,10 @@ ifdef SFX
RELX_TAR = 1
endif
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
# Core targets.
ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
+ifneq ($(wildcard $(RELX_CONFIG))$(wildcard $(RELX_CONFIG_SCRIPT)),)
rel:: relx-rel
relup:: relx-relup
@@ -7450,21 +6823,85 @@ distclean:: distclean-relx-rel
# Plugin-specific targets.
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
+define relx_get_config.erl
+ (fun() ->
+ Config0 =
+ case file:consult("$(call core_native_path,$(RELX_CONFIG))") of
+ {ok, Terms} ->
+ Terms;
+ {error, _} ->
+ []
+ end,
+ case filelib:is_file("$(call core_native_path,$(RELX_CONFIG_SCRIPT))") of
+ true ->
+ Bindings = erl_eval:add_binding('CONFIG', Config0, erl_eval:new_bindings()),
+ {ok, Config1} = file:script("$(call core_native_path,$(RELX_CONFIG_SCRIPT))", Bindings),
+ Config1;
+ false ->
+ Config0
+ end
+ end)()
+endef
+
+define relx_release.erl
+ Config = $(call relx_get_config.erl),
+ {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
+ Vsn = case Vsn0 of
+ {cmd, Cmd} -> os:cmd(Cmd);
+ semver -> "";
+ {semver, _} -> "";
+ {git, short} -> string:trim(os:cmd("git rev-parse --short HEAD"), both, "\n");
+ {git, long} -> string:trim(os:cmd("git rev-parse HEAD"), both, "\n");
+ VsnStr -> Vsn0
+ end,
+ {ok, _} = relx:build_release(#{name => Name, vsn => Vsn}, Config ++ [{output_dir, "$(RELX_OUTPUT_DIR)"}]),
+ halt(0).
+endef
+
+define relx_tar.erl
+ Config = $(call relx_get_config.erl),
+ {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
+ Vsn = case Vsn0 of
+ {cmd, Cmd} -> os:cmd(Cmd);
+ semver -> "";
+ {semver, _} -> "";
+ {git, short} -> string:trim(os:cmd("git rev-parse --short HEAD"), both, "\n");
+ {git, long} -> string:trim(os:cmd("git rev-parse HEAD"), both, "\n");
+ VsnStr -> Vsn0
+ end,
+ {ok, _} = relx:build_tar(#{name => Name, vsn => Vsn}, Config ++ [{output_dir, "$(RELX_OUTPUT_DIR)"}]),
+ halt(0).
+endef
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
+define relx_relup.erl
+ Config = $(call relx_get_config.erl),
+ {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
+ Vsn = case Vsn0 of
+ {cmd, Cmd} -> os:cmd(Cmd);
+ semver -> "";
+ {semver, _} -> "";
+ {git, short} -> string:trim(os:cmd("git rev-parse --short HEAD"), both, "\n");
+ {git, long} -> string:trim(os:cmd("git rev-parse HEAD"), both, "\n");
+ VsnStr -> Vsn0
+ end,
+ {ok, _} = relx:build_relup(Name, Vsn, undefined, Config ++ [{output_dir, "$(RELX_OUTPUT_DIR)"}]),
+ halt(0).
+endef
+
+relx-rel: rel-deps app
+ $(call erlang,$(call relx_release.erl),-pa ebin/)
$(verbose) $(MAKE) relx-post-rel
ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
+ $(call erlang,$(call relx_tar.erl),-pa ebin/)
endif
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
+relx-relup: rel-deps app
+ $(call erlang,$(call relx_release.erl),-pa ebin/)
$(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
+ $(call erlang,$(call relx_relup.erl),-pa ebin/)
+ifeq ($(RELX_TAR),1)
+ $(call erlang,$(call relx_tar.erl),-pa ebin/)
+endif
distclean-relx-rel:
$(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
@@ -7475,17 +6912,19 @@ relx-post-rel::
# Run target.
-ifeq ($(wildcard $(RELX_CONFIG)),)
+ifeq ($(wildcard $(RELX_CONFIG))$(wildcard $(RELX_CONFIG_SCRIPT)),)
run::
else
define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
+ Config = $(call relx_get_config.erl),
{release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
Vsn = case Vsn0 of
{cmd, Cmd} -> os:cmd(Cmd);
semver -> "";
{semver, _} -> "";
+ {git, short} -> string:trim(os:cmd("git rev-parse --short HEAD"), both, "\n");
+ {git, long} -> string:trim(os:cmd("git rev-parse HEAD"), both, "\n");
VsnStr -> Vsn0
end,
Extended = case lists:keyfind(extended_start_script, 1, Config) of
@@ -7512,7 +6951,7 @@ ifdef RELOAD
rel::
$(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
$(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
+ eval "io:format(\"~p~n\", [c:lm()])."
endif
help::
@@ -7521,6 +6960,7 @@ help::
" run Compile the project, build the release and run it"
endif
+endif
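relx is no longer fetched as a prebuilt escript: the plugin only activates when relx appears among BUILD_DEPS, DEPS or REL_DEPS, and it drives relx:build_release/2, relx:build_relup/4 and relx:build_tar/2 directly, with relx.config optionally post-processed by a relx.config.script evaluated with CONFIG bound to the consulted terms. A sketch:

    # Pull relx in as a dependency to enable the rel/relup/run targets:
    REL_DEPS = relx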
# Copyright (c) 2015-2016, Loïc Hoguin <[email protected]>
# Copyright (c) 2014, M Robert Martin <[email protected]>
@@ -7669,8 +7109,8 @@ define triq_check.erl
end of
true -> halt(0);
_ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
+ catch error:undef$(if $V,:Stacktrace) ->
+ io:format("Undefined property or module?~n$(if $V,~p~n)", [$(if $V,Stacktrace)]),
halt(0)
end.
endef
@@ -7692,45 +7132,224 @@ triq: test-build cover-data-dir
endif
endif
-# Copyright (c) 2016, Loïc Hoguin <[email protected]>
-# Copyright (c) 2015, Erlang Solutions Ltd.
+# Copyright (c) 2022, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
-.PHONY: xref distclean-xref
+.PHONY: xref
# Configuration.
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
+# We do not use locals_not_used or deprecated_function_calls
+# because the compiler will error out by default in those
+# cases with Erlang.mk. Deprecated functions may make sense
+# in some cases but few libraries define them. We do not
+# use exports_not_used by default because it hinders more
+# than it helps library projects such as Cowboy. Finally,
+# undefined_functions provides little that undefined_function_calls
+# doesn't already provide, so it's not enabled by default.
+XREF_CHECKS ?= [undefined_function_calls]
+
+# Instead of predefined checks a query can be evaluated
+# using the Xref DSL. The $q variable is used in that case.
+
+# The scope is a list of keywords that correspond to
+# application directories, being essentially an easy way
+# to configure which applications to analyze. With:
+#
+# - app: .
+# - apps: $(ALL_APPS_DIRS)
+# - deps: $(ALL_DEPS_DIRS)
+# - otp: Built-in Erlang/OTP applications.
+#
+# The default is conservative (app) and will not be
+# appropriate for all types of queries (for example
+# application_call requires adding all applications
+# that might be called or they will not be found).
+XREF_SCOPE ?= app # apps deps otp
+
+# If the above is not enough, additional application
+# directories can be configured.
+XREF_EXTRA_APP_DIRS ?=
+
+# As well as additional non-application directories.
+XREF_EXTRA_DIRS ?=
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
+# Erlang.mk supports -ignore_xref([...]) with forms
+# {M, F, A} | {F, A} | M, the latter ignoring whole
+# modules. Ignores can also be provided project-wide.
+XREF_IGNORE ?= []
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
+# All callbacks may be ignored. Erlang.mk will ignore
+# them automatically for exports_not_used (unless it
+# is explicitly disabled by the user).
+XREF_IGNORE_CALLBACKS ?=
# Core targets.
help::
$(verbose) printf '%s\n' '' \
'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
+ ' xref Analyze the project using Xref' \
+ ' xref q=QUERY Evaluate an Xref query'
# Plugin-specific targets.
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+define xref.erl
+ {ok, Xref} = xref:start([]),
+ Scope = [$(call comma_list,$(XREF_SCOPE))],
+ AppDirs0 = [$(call comma_list,$(foreach d,$(XREF_EXTRA_APP_DIRS),"$d"))],
+ AppDirs1 = case lists:member(otp, Scope) of
+ false -> AppDirs0;
+ true ->
+ RootDir = code:root_dir(),
+ AppDirs0 ++ [filename:dirname(P) || P <- code:get_path(), lists:prefix(RootDir, P)]
+ end,
+ AppDirs2 = case lists:member(deps, Scope) of
+ false -> AppDirs1;
+ true -> [$(call comma_list,$(foreach d,$(ALL_DEPS_DIRS),"$d"))] ++ AppDirs1
+ end,
+ AppDirs3 = case lists:member(apps, Scope) of
+ false -> AppDirs2;
+ true -> [$(call comma_list,$(foreach d,$(ALL_APPS_DIRS),"$d"))] ++ AppDirs2
+ end,
+ AppDirs = case lists:member(app, Scope) of
+ false -> AppDirs3;
+ true -> ["../$(notdir $(CURDIR))"|AppDirs3]
+ end,
+ [{ok, _} = xref:add_application(Xref, AppDir, [{builtins, true}]) || AppDir <- AppDirs],
+ ExtraDirs = [$(call comma_list,$(foreach d,$(XREF_EXTRA_DIRS),"$d"))],
+ [{ok, _} = xref:add_directory(Xref, ExtraDir, [{builtins, true}]) || ExtraDir <- ExtraDirs],
+ ok = xref:set_library_path(Xref, code:get_path() -- (["ebin", "."] ++ AppDirs ++ ExtraDirs)),
+ Checks = case {$1, is_list($2)} of
+ {check, true} -> $2;
+ {check, false} -> [$2];
+ {query, _} -> [$2]
+ end,
+ FinalRes = [begin
+ IsInformational = case $1 of
+ query -> true;
+ check ->
+ is_tuple(Check) andalso
+ lists:member(element(1, Check),
+ [call, use, module_call, module_use, application_call, application_use])
+ end,
+ {ok, Res0} = case $1 of
+ check -> xref:analyze(Xref, Check);
+ query -> xref:q(Xref, Check)
+ end,
+ Res = case IsInformational of
+ true -> Res0;
+ false ->
+ lists:filter(fun(R) ->
+ {Mod, InMFA, MFA} = case R of
+ {InMFA0 = {M, _, _}, MFA0} -> {M, InMFA0, MFA0};
+ {M, _, _} -> {M, R, R}
+ end,
+ Attrs = try
+ Mod:module_info(attributes)
+ catch error:undef ->
+ []
+ end,
+ InlineIgnores = lists:flatten([
+ [case V of
+ M when is_atom(M) -> {M, '_', '_'};
+ {F, A} -> {Mod, F, A};
+ _ -> V
+ end || V <- Values]
+ || {ignore_xref, Values} <- Attrs]),
+ BuiltinIgnores = [
+ {eunit_test, wrapper_test_exported_, 0}
+ ],
+ DoCallbackIgnores = case {Check, "$(strip $(XREF_IGNORE_CALLBACKS))"} of
+ {exports_not_used, ""} -> true;
+ {_, "0"} -> false;
+ _ -> true
+ end,
+ CallbackIgnores = case DoCallbackIgnores of
+ false -> [];
+ true ->
+ Behaviors = lists:flatten([
+ [BL || {behavior, BL} <- Attrs],
+ [BL || {behaviour, BL} <- Attrs]
+ ]),
+ [{Mod, CF, CA} || B <- Behaviors, {CF, CA} <- B:behaviour_info(callbacks)]
+ end,
+ WideIgnores = if
+ is_list($(XREF_IGNORE)) ->
+ [if is_atom(I) -> {I, '_', '_'}; true -> I end
+ || I <- $(XREF_IGNORE)];
+ true -> [$(XREF_IGNORE)]
+ end,
+ Ignores = InlineIgnores ++ BuiltinIgnores ++ CallbackIgnores ++ WideIgnores,
+ not (lists:member(InMFA, Ignores)
+ orelse lists:member(MFA, Ignores)
+ orelse lists:member({Mod, '_', '_'}, Ignores))
+ end, Res0)
+ end,
+ case Res of
+ [] -> ok;
+ _ when IsInformational ->
+ case Check of
+ {call, {CM, CF, CA}} ->
+ io:format("Functions that ~s:~s/~b calls:~n", [CM, CF, CA]);
+ {use, {CM, CF, CA}} ->
+ io:format("Function ~s:~s/~b is called by:~n", [CM, CF, CA]);
+ {module_call, CMod} ->
+ io:format("Modules that ~s calls:~n", [CMod]);
+ {module_use, CMod} ->
+ io:format("Module ~s is used by:~n", [CMod]);
+ {application_call, CApp} ->
+ io:format("Applications that ~s calls:~n", [CApp]);
+ {application_use, CApp} ->
+ io:format("Application ~s is used by:~n", [CApp]);
+ _ when $1 =:= query ->
+ io:format("Query ~s returned:~n", [Check])
+ end,
+ [case R of
+ {{InM, InF, InA}, {M, F, A}} ->
+ io:format("- ~s:~s/~b called by ~s:~s/~b~n",
+ [M, F, A, InM, InF, InA]);
+ {M, F, A} ->
+ io:format("- ~s:~s/~b~n", [M, F, A]);
+ ModOrApp ->
+ io:format("- ~s~n", [ModOrApp])
+ end || R <- Res],
+ ok;
+ _ ->
+ [case {Check, R} of
+ {undefined_function_calls, {{InM, InF, InA}, {M, F, A}}} ->
+ io:format("Undefined function ~s:~s/~b called by ~s:~s/~b~n",
+ [M, F, A, InM, InF, InA]);
+ {undefined_functions, {M, F, A}} ->
+ io:format("Undefined function ~s:~s/~b~n", [M, F, A]);
+ {locals_not_used, {M, F, A}} ->
+ io:format("Unused local function ~s:~s/~b~n", [M, F, A]);
+ {exports_not_used, {M, F, A}} ->
+ io:format("Unused exported function ~s:~s/~b~n", [M, F, A]);
+ {deprecated_function_calls, {{InM, InF, InA}, {M, F, A}}} ->
+ io:format("Deprecated function ~s:~s/~b called by ~s:~s/~b~n",
+ [M, F, A, InM, InF, InA]);
+ {deprecated_functions, {M, F, A}} ->
+ io:format("Deprecated function ~s:~s/~b~n", [M, F, A]);
+ _ ->
+ io:format("~p: ~p~n", [Check, R])
+ end || R <- Res],
+ error
+ end
+ end || Check <- Checks],
+ stopped = xref:stop(Xref),
+ case lists:usort(FinalRes) of
+ [ok] -> halt(0);
+ _ -> halt(1)
+ end
+endef
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
+xref: deps app
+ifdef q
+ $(verbose) $(call erlang,$(call xref.erl,query,"$q"),-pa ebin/)
+else
+ $(verbose) $(call erlang,$(call xref.erl,check,$(XREF_CHECKS)),-pa ebin/)
+endif
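The xrefr escript is replaced by an in-process xref run configured entirely through variables: checks come from XREF_CHECKS, the analyzed applications from XREF_SCOPE, and ignores from -ignore_xref module attributes plus XREF_IGNORE. A sketch (module and function names are hypothetical, and the query is only one example of the Xref DSL):

    # Also report unused exports, and analyze the deps alongside the app:
    XREF_CHECKS = [undefined_function_calls, exports_not_used]
    XREF_SCOPE = app deps
    # Project-wide ignores: {M,F,A}, {F,A} (current module) or a whole module:
    XREF_IGNORE = [{my_mod, internal_fun, 1}]
    # One-off query:
    #   make xref q='E | my_mod'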
# Copyright (c) 2016, Loïc Hoguin <[email protected]>
# Copyright (c) 2015, Viktor Söderqvist <[email protected]>
@@ -7742,6 +7361,7 @@ COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
ifdef COVER
COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
COVER_DEPS ?=
+COVER_EXCLUDE_MODS ?=
endif
# Code coverage for Common Test.
@@ -7757,7 +7377,8 @@ $(TEST_DIR)/ct.cover.spec: cover-data-dir
"{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
$(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
$(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
+ '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' \
+ "{excl_mods, '$(PROJECT)', [$(call comma_list,$(COVER_EXCLUDE_MODS))]}." > $@
CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
endif
@@ -7772,14 +7393,18 @@ define cover.erl
Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
$(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
$(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
+ Excludes = [$(call comma_list,$(foreach e,$(COVER_EXCLUDE_MODS),"$e"))],
+ [case file:list_dir(Dir) of
+ {error, enotdir} -> false;
+ {error, _} -> halt(2);
+ {ok, Files} ->
+ BeamFiles = [filename:join(Dir, File) ||
+ File <- Files,
+ not lists:member(filename:basename(File, ".beam"), Excludes),
+ filename:extension(File) =:= ".beam"],
+ case cover:compile_beam(BeamFiles) of
+ {error, _} -> halt(1);
+ _ -> true
end
end || Dir <- Dirs]
end,
diff --git a/include/cow_inline.hrl b/include/cow_inline.hrl
index f0d12eb..1ad417e 100644
--- a/include/cow_inline.hrl
+++ b/include/cow_inline.hrl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2014-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2014-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/include/cow_parse.hrl b/include/cow_parse.hrl
index ee4af70..72eaff6 100644
--- a/include/cow_parse.hrl
+++ b/include/cow_parse.hrl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2015-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2015-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_base64url.erl b/src/cow_base64url.erl
index 17ec46c..e591fcf 100644
--- a/src/cow_base64url.erl
+++ b/src/cow_base64url.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2017-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2017-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_cookie.erl b/src/cow_cookie.erl
index 93a8e61..11cf339 100644
--- a/src/cow_cookie.erl
+++ b/src/cow_cookie.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2013-2020, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2013-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
@@ -26,7 +26,7 @@
path => binary(),
secure => true,
http_only => true,
- same_site => strict | lax | none
+ same_site => default | none | strict | lax
}.
-export_type([cookie_attrs/0]).
@@ -35,7 +35,7 @@
http_only => boolean(),
max_age => non_neg_integer(),
path => binary(),
- same_site => strict | lax | none,
+ same_site => default | none | strict | lax,
secure => boolean()
}.
-export_type([cookie_opts/0]).
@@ -173,17 +173,32 @@ parse_cookie_error_test_() ->
-> {ok, binary(), binary(), cookie_attrs()}
| ignore.
parse_set_cookie(SetCookie) ->
- {NameValuePair, UnparsedAttrs} = take_until_semicolon(SetCookie, <<>>),
- {Name, Value} = case binary:split(NameValuePair, <<$=>>) of
- [Value0] -> {<<>>, trim(Value0)};
- [Name0, Value0] -> {trim(Name0), trim(Value0)}
- end,
- case {Name, Value} of
- {<<>>, <<>>} ->
+ case has_non_ws_ctl(SetCookie) of
+ true ->
ignore;
- _ ->
- Attrs = parse_set_cookie_attrs(UnparsedAttrs, #{}),
- {ok, Name, Value, Attrs}
+ false ->
+ {NameValuePair, UnparsedAttrs} = take_until_semicolon(SetCookie, <<>>),
+ {Name, Value} = case binary:split(NameValuePair, <<$=>>) of
+ [Value0] -> {<<>>, trim(Value0)};
+ [Name0, Value0] -> {trim(Name0), trim(Value0)}
+ end,
+ case {Name, Value} of
+ {<<>>, <<>>} ->
+ ignore;
+ _ ->
+ Attrs = parse_set_cookie_attrs(UnparsedAttrs, #{}),
+ {ok, Name, Value, Attrs}
+ end
+ end.
+
+has_non_ws_ctl(<<>>) ->
+ false;
+has_non_ws_ctl(<<C,R/bits>>) ->
+ if
+ C =< 16#08 -> true;
+ C >= 16#0A, C =< 16#1F -> true;
+ C =:= 16#7F -> true;
+ true -> has_non_ws_ctl(R)
end.
parse_set_cookie_attrs(<<>>, Attrs) ->
@@ -194,13 +209,18 @@ parse_set_cookie_attrs(<<$;,Rest0/bits>>, Attrs) ->
[Name0] -> {trim(Name0), <<>>};
[Name0, Value0] -> {trim(Name0), trim(Value0)}
end,
- case parse_set_cookie_attr(?LOWER(Name), Value) of
- {ok, AttrName, AttrValue} ->
- parse_set_cookie_attrs(Rest, Attrs#{AttrName => AttrValue});
- {ignore, AttrName} ->
- parse_set_cookie_attrs(Rest, maps:remove(AttrName, Attrs));
- ignore ->
- parse_set_cookie_attrs(Rest, Attrs)
+ if
+ byte_size(Value) > 1024 ->
+ parse_set_cookie_attrs(Rest, Attrs);
+ true ->
+ case parse_set_cookie_attr(?LOWER(Name), Value) of
+ {ok, AttrName, AttrValue} ->
+ parse_set_cookie_attrs(Rest, Attrs#{AttrName => AttrValue});
+ {ignore, AttrName} ->
+ parse_set_cookie_attrs(Rest, maps:remove(AttrName, Attrs));
+ ignore ->
+ parse_set_cookie_attrs(Rest, Attrs)
+ end
end.
take_until_semicolon(Rest = <<$;,_/bits>>, Acc) -> {Acc, Rest};
@@ -254,16 +274,15 @@ parse_set_cookie_attr(<<"httponly">>, _) ->
{ok, http_only, true};
parse_set_cookie_attr(<<"samesite">>, Value) ->
case ?LOWER(Value) of
+ <<"none">> ->
+ {ok, same_site, none};
<<"strict">> ->
{ok, same_site, strict};
<<"lax">> ->
{ok, same_site, lax};
- %% Clients may have different defaults than "None".
- <<"none">> ->
- {ok, same_site, none};
%% Unknown values and lack of value are equivalent.
_ ->
- ignore
+ {ok, same_site, default}
end;
parse_set_cookie_attr(_, _) ->
ignore.
@@ -282,6 +301,10 @@ parse_set_cookie_test_() ->
{ok, <<"a">>, <<"b">>, #{domain => <<"foo.example.org">>}}},
{<<"a=b; Path=/path/to/resource; Path=/">>,
{ok, <<"a">>, <<"b">>, #{path => <<"/">>}}},
+ {<<"a=b; SameSite=UnknownValue">>, {ok, <<"a">>, <<"b">>, #{same_site => default}}},
+ {<<"a=b; SameSite=None">>, {ok, <<"a">>, <<"b">>, #{same_site => none}}},
+ {<<"a=b; SameSite=Lax">>, {ok, <<"a">>, <<"b">>, #{same_site => lax}}},
+ {<<"a=b; SameSite=Strict">>, {ok, <<"a">>, <<"b">>, #{same_site => strict}}},
{<<"a=b; SameSite=Lax; SameSite=Strict">>,
{ok, <<"a">>, <<"b">>, #{same_site => strict}}}
],
@@ -331,7 +354,7 @@ setcookie(Name, Value, Opts) ->
<<$\s>>, <<$\t>>, <<$\r>>, <<$\n>>, <<$\013>>, <<$\014>>]),
nomatch = binary:match(iolist_to_binary(Value), [<<$,>>, <<$;>>,
<<$\s>>, <<$\t>>, <<$\r>>, <<$\n>>, <<$\013>>, <<$\014>>]),
- [Name, <<"=">>, Value, <<"; Version=1">>, attributes(maps:to_list(Opts))].
+ [Name, <<"=">>, Value, attributes(maps:to_list(Opts))].
attributes([]) -> [];
attributes([{domain, Domain}|Tail]) -> [<<"; Domain=">>, Domain|attributes(Tail)];
@@ -349,9 +372,10 @@ attributes([Opt={max_age, _}|_]) ->
attributes([{path, Path}|Tail]) -> [<<"; Path=">>, Path|attributes(Tail)];
attributes([{secure, false}|Tail]) -> attributes(Tail);
attributes([{secure, true}|Tail]) -> [<<"; Secure">>|attributes(Tail)];
+attributes([{same_site, default}|Tail]) -> attributes(Tail);
+attributes([{same_site, none}|Tail]) -> [<<"; SameSite=None">>|attributes(Tail)];
attributes([{same_site, lax}|Tail]) -> [<<"; SameSite=Lax">>|attributes(Tail)];
attributes([{same_site, strict}|Tail]) -> [<<"; SameSite=Strict">>|attributes(Tail)];
-attributes([{same_site, none}|Tail]) -> [<<"; SameSite=None">>|attributes(Tail)];
%% Skip unknown options.
attributes([_|Tail]) -> attributes(Tail).
@@ -361,26 +385,32 @@ setcookie_test_() ->
Tests = [
{<<"Customer">>, <<"WILE_E_COYOTE">>,
#{http_only => true, domain => <<"acme.com">>},
- <<"Customer=WILE_E_COYOTE; Version=1; "
+ <<"Customer=WILE_E_COYOTE; "
"Domain=acme.com; HttpOnly">>},
{<<"Customer">>, <<"WILE_E_COYOTE">>,
#{path => <<"/acme">>},
- <<"Customer=WILE_E_COYOTE; Version=1; Path=/acme">>},
+ <<"Customer=WILE_E_COYOTE; Path=/acme">>},
{<<"Customer">>, <<"WILE_E_COYOTE">>,
#{secure => true},
- <<"Customer=WILE_E_COYOTE; Version=1; Secure">>},
+ <<"Customer=WILE_E_COYOTE; Secure">>},
{<<"Customer">>, <<"WILE_E_COYOTE">>,
#{secure => false, http_only => false},
- <<"Customer=WILE_E_COYOTE; Version=1">>},
+ <<"Customer=WILE_E_COYOTE">>},
+ {<<"Customer">>, <<"WILE_E_COYOTE">>,
+ #{same_site => default},
+ <<"Customer=WILE_E_COYOTE">>},
+ {<<"Customer">>, <<"WILE_E_COYOTE">>,
+ #{same_site => none},
+ <<"Customer=WILE_E_COYOTE; SameSite=None">>},
{<<"Customer">>, <<"WILE_E_COYOTE">>,
#{same_site => lax},
- <<"Customer=WILE_E_COYOTE; Version=1; SameSite=Lax">>},
+ <<"Customer=WILE_E_COYOTE; SameSite=Lax">>},
{<<"Customer">>, <<"WILE_E_COYOTE">>,
#{same_site => strict},
- <<"Customer=WILE_E_COYOTE; Version=1; SameSite=Strict">>},
+ <<"Customer=WILE_E_COYOTE; SameSite=Strict">>},
{<<"Customer">>, <<"WILE_E_COYOTE">>,
#{path => <<"/acme">>, badoption => <<"negatory">>},
- <<"Customer=WILE_E_COYOTE; Version=1; Path=/acme">>}
+ <<"Customer=WILE_E_COYOTE; Path=/acme">>}
],
[{R, fun() -> R = iolist_to_binary(setcookie(N, V, O)) end}
|| {N, V, O, R} <- Tests].
@@ -391,7 +421,6 @@ setcookie_max_age_test() ->
setcookie(N, V, O)), <<";">>, [global])
end,
[<<"Customer=WILE_E_COYOTE">>,
- <<" Version=1">>,
<<" Expires=", _/binary>>,
<<" Max-Age=111">>,
<<" Secure">>] = F(<<"Customer">>, <<"WILE_E_COYOTE">>,
@@ -400,7 +429,6 @@ setcookie_max_age_test() ->
{'EXIT', {{badarg, {max_age, -111}}, _}} -> ok
end,
[<<"Customer=WILE_E_COYOTE">>,
- <<" Version=1">>,
<<" Expires=", _/binary>>,
<<" Max-Age=86417">>] = F(<<"Customer">>, <<"WILE_E_COYOTE">>,
#{max_age => 86417}),
diff --git a/src/cow_date.erl b/src/cow_date.erl
index 36ce861..00bc8af 100644
--- a/src/cow_date.erl
+++ b/src/cow_date.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2013-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2013-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_hpack.erl b/src/cow_hpack.erl
index 4a02d79..cddeb06 100644
--- a/src/cow_hpack.erl
+++ b/src/cow_hpack.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2015-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2015-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
@@ -40,13 +40,17 @@
-opaque state() :: #state{}.
-export_type([state/0]).
--type opts() :: map().
--export_type([opts/0]).
+-type encoder_opts() :: #{
+ huffman => boolean()
+}.
+-export_type([encoder_opts/0]).
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
-endif.
+-include("cow_hpack_common.hrl").
+
%% State initialization.
-spec init() -> state().
@@ -182,22 +186,6 @@ dec_lit_no_index(Rest, State, Acc, Name) ->
%% @todo Literal header field never indexed.
-%% Decode an integer.
-
-%% The HPACK format has 4 different integer prefixes length (from 4 to 7)
-%% and each can be used to create an indefinite length integer if all bits
-%% of the prefix are set to 1.
-
-dec_int5(<< 2#11111:5, Rest/bits >>) ->
- dec_big_int(Rest, 31, 0);
-dec_int5(<< Int:5, Rest/bits >>) ->
- {Int, Rest}.
-
-dec_big_int(<< 0:1, Value:7, Rest/bits >>, Int, M) ->
- {Int + (Value bsl M), Rest};
-dec_big_int(<< 1:1, Value:7, Rest/bits >>, Int, M) ->
- dec_big_int(Rest, Int + (Value bsl M), M + 7).
-
%% Decode a string.
dec_str(<<0:1, 2#1111111:7, Rest0/bits>>) ->
@@ -213,41 +201,6 @@ dec_str(<<1:1, 2#1111111:7, Rest0/bits>>) ->
dec_str(<<1:1, Length:7, Rest/bits>>) ->
dec_huffman(Rest, Length, 0, <<>>).
-%% We use a lookup table that allows us to benefit from
-%% the binary match context optimization. A more naive
-%% implementation using bit pattern matching cannot reuse
-%% a match context because it wouldn't always match on
-%% byte boundaries.
-%%
-%% See cow_hpack_dec_huffman_lookup.hrl for more details.
-
-dec_huffman(<<A:4, B:4, R/bits>>, Len, Huff0, Acc) when Len > 1 ->
- {_, CharA, Huff1} = dec_huffman_lookup(Huff0, A),
- {_, CharB, Huff} = dec_huffman_lookup(Huff1, B),
- case {CharA, CharB} of
- {undefined, undefined} -> dec_huffman(R, Len - 1, Huff, Acc);
- {CharA, undefined} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharA>>);
- {undefined, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharB>>);
- {CharA, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharA, CharB>>)
- end;
-dec_huffman(<<A:4, B:4, Rest/bits>>, 1, Huff0, Acc) ->
- {_, CharA, Huff} = dec_huffman_lookup(Huff0, A),
- {ok, CharB, _} = dec_huffman_lookup(Huff, B),
- case {CharA, CharB} of
- %% {undefined, undefined} (> 7-bit final padding) is rejected with a crash.
- {CharA, undefined} ->
- {<<Acc/binary, CharA>>, Rest};
- {undefined, CharB} ->
- {<<Acc/binary, CharB>>, Rest};
- _ ->
- {<<Acc/binary, CharA, CharB>>, Rest}
- end;
-%% Can only be reached when the string length to decode is 0.
-dec_huffman(Rest, 0, _, <<>>) ->
- {<<>>, Rest}.
-
--include("cow_hpack_dec_huffman_lookup.hrl").
-
-ifdef(TEST).
%% Test case extracted from h2spec.
decode_reject_eos_test() ->
@@ -530,7 +483,8 @@ encode(Headers, State0=#state{configured_max_size=MaxSize}) ->
{Data, State} = encode(Headers, State1, huffman, []),
{[enc_int5(MaxSize, 2#001)|Data], State}.
--spec encode(cow_http:headers(), State, opts()) -> {iodata(), State} when State::state().
+-spec encode(cow_http:headers(), State, encoder_opts())
+ -> {iodata(), State} when State::state().
encode(Headers, State=#state{max_size=MaxSize, configured_max_size=MaxSize}, Opts) ->
encode(Headers, State, huffman_opt(Opts), []);
encode(Headers, State0=#state{configured_max_size=MaxSize}, Opts) ->
@@ -569,304 +523,6 @@ encode([{Name, Value0}|Tail], State, HuffmanOpt, Acc) ->
[[<< 0:1, 1:1, 0:6 >>|[enc_str(Name, HuffmanOpt)|enc_str(Value, HuffmanOpt)]]|Acc])
end.
-%% Encode an integer.
-
-enc_int5(Int, Prefix) when Int < 31 ->
- << Prefix:3, Int:5 >>;
-enc_int5(Int, Prefix) ->
- enc_big_int(Int - 31, << Prefix:3, 2#11111:5 >>).
-
-enc_int6(Int, Prefix) when Int < 63 ->
- << Prefix:2, Int:6 >>;
-enc_int6(Int, Prefix) ->
- enc_big_int(Int - 63, << Prefix:2, 2#111111:6 >>).
-
-enc_int7(Int, Prefix) when Int < 127 ->
- << Prefix:1, Int:7 >>;
-enc_int7(Int, Prefix) ->
- enc_big_int(Int - 127, << Prefix:1, 2#1111111:7 >>).
-
-enc_big_int(Int, Acc) when Int < 128 ->
- <<Acc/binary, Int:8>>;
-enc_big_int(Int, Acc) ->
- enc_big_int(Int bsr 7, <<Acc/binary, 1:1, Int:7>>).
-
-%% Encode a string.
-
-enc_str(Str, huffman) ->
- Str2 = enc_huffman(Str, <<>>),
- [enc_int7(byte_size(Str2), 2#1)|Str2];
-enc_str(Str, no_huffman) ->
- [enc_int7(byte_size(Str), 2#0)|Str].
-
-enc_huffman(<<>>, Acc) ->
- case bit_size(Acc) rem 8 of
- 1 -> << Acc/bits, 2#1111111:7 >>;
- 2 -> << Acc/bits, 2#111111:6 >>;
- 3 -> << Acc/bits, 2#11111:5 >>;
- 4 -> << Acc/bits, 2#1111:4 >>;
- 5 -> << Acc/bits, 2#111:3 >>;
- 6 -> << Acc/bits, 2#11:2 >>;
- 7 -> << Acc/bits, 2#1:1 >>;
- 0 -> Acc
- end;
-enc_huffman(<< 0, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111000:13 >>);
-enc_huffman(<< 1, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011000:23 >>);
-enc_huffman(<< 2, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100010:28 >>);
-enc_huffman(<< 3, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100011:28 >>);
-enc_huffman(<< 4, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100100:28 >>);
-enc_huffman(<< 5, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100101:28 >>);
-enc_huffman(<< 6, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100110:28 >>);
-enc_huffman(<< 7, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100111:28 >>);
-enc_huffman(<< 8, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101000:28 >>);
-enc_huffman(<< 9, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101010:24 >>);
-enc_huffman(<< 10, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111111111100:30 >>);
-enc_huffman(<< 11, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101001:28 >>);
-enc_huffman(<< 12, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101010:28 >>);
-enc_huffman(<< 13, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111111111101:30 >>);
-enc_huffman(<< 14, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101011:28 >>);
-enc_huffman(<< 15, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101100:28 >>);
-enc_huffman(<< 16, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101101:28 >>);
-enc_huffman(<< 17, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101110:28 >>);
-enc_huffman(<< 18, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101111:28 >>);
-enc_huffman(<< 19, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110000:28 >>);
-enc_huffman(<< 20, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110001:28 >>);
-enc_huffman(<< 21, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110010:28 >>);
-enc_huffman(<< 22, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111111111110:30 >>);
-enc_huffman(<< 23, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110011:28 >>);
-enc_huffman(<< 24, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110100:28 >>);
-enc_huffman(<< 25, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110101:28 >>);
-enc_huffman(<< 26, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110110:28 >>);
-enc_huffman(<< 27, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110111:28 >>);
-enc_huffman(<< 28, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111000:28 >>);
-enc_huffman(<< 29, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111001:28 >>);
-enc_huffman(<< 30, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111010:28 >>);
-enc_huffman(<< 31, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111011:28 >>);
-enc_huffman(<< 32, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010100:6 >>);
-enc_huffman(<< 33, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111000:10 >>);
-enc_huffman(<< 34, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111001:10 >>);
-enc_huffman(<< 35, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111010:12 >>);
-enc_huffman(<< 36, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111001:13 >>);
-enc_huffman(<< 37, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010101:6 >>);
-enc_huffman(<< 38, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111000:8 >>);
-enc_huffman(<< 39, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111010:11 >>);
-enc_huffman(<< 40, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111010:10 >>);
-enc_huffman(<< 41, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111011:10 >>);
-enc_huffman(<< 42, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111001:8 >>);
-enc_huffman(<< 43, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111011:11 >>);
-enc_huffman(<< 44, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111010:8 >>);
-enc_huffman(<< 45, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010110:6 >>);
-enc_huffman(<< 46, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010111:6 >>);
-enc_huffman(<< 47, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011000:6 >>);
-enc_huffman(<< 48, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00000:5 >>);
-enc_huffman(<< 49, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00001:5 >>);
-enc_huffman(<< 50, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00010:5 >>);
-enc_huffman(<< 51, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011001:6 >>);
-enc_huffman(<< 52, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011010:6 >>);
-enc_huffman(<< 53, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011011:6 >>);
-enc_huffman(<< 54, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011100:6 >>);
-enc_huffman(<< 55, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011101:6 >>);
-enc_huffman(<< 56, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011110:6 >>);
-enc_huffman(<< 57, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011111:6 >>);
-enc_huffman(<< 58, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011100:7 >>);
-enc_huffman(<< 59, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111011:8 >>);
-enc_huffman(<< 60, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111100:15 >>);
-enc_huffman(<< 61, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100000:6 >>);
-enc_huffman(<< 62, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111011:12 >>);
-enc_huffman(<< 63, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111100:10 >>);
-enc_huffman(<< 64, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111010:13 >>);
-enc_huffman(<< 65, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100001:6 >>);
-enc_huffman(<< 66, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011101:7 >>);
-enc_huffman(<< 67, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011110:7 >>);
-enc_huffman(<< 68, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011111:7 >>);
-enc_huffman(<< 69, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100000:7 >>);
-enc_huffman(<< 70, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100001:7 >>);
-enc_huffman(<< 71, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100010:7 >>);
-enc_huffman(<< 72, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100011:7 >>);
-enc_huffman(<< 73, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100100:7 >>);
-enc_huffman(<< 74, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100101:7 >>);
-enc_huffman(<< 75, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100110:7 >>);
-enc_huffman(<< 76, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100111:7 >>);
-enc_huffman(<< 77, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101000:7 >>);
-enc_huffman(<< 78, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101001:7 >>);
-enc_huffman(<< 79, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101010:7 >>);
-enc_huffman(<< 80, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101011:7 >>);
-enc_huffman(<< 81, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101100:7 >>);
-enc_huffman(<< 82, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101101:7 >>);
-enc_huffman(<< 83, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101110:7 >>);
-enc_huffman(<< 84, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101111:7 >>);
-enc_huffman(<< 85, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110000:7 >>);
-enc_huffman(<< 86, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110001:7 >>);
-enc_huffman(<< 87, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110010:7 >>);
-enc_huffman(<< 88, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111100:8 >>);
-enc_huffman(<< 89, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110011:7 >>);
-enc_huffman(<< 90, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111101:8 >>);
-enc_huffman(<< 91, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111011:13 >>);
-enc_huffman(<< 92, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111110000:19 >>);
-enc_huffman(<< 93, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111100:13 >>);
-enc_huffman(<< 94, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111100:14 >>);
-enc_huffman(<< 95, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100010:6 >>);
-enc_huffman(<< 96, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111101:15 >>);
-enc_huffman(<< 97, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00011:5 >>);
-enc_huffman(<< 98, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100011:6 >>);
-enc_huffman(<< 99, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00100:5 >>);
-enc_huffman(<< 100, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100100:6 >>);
-enc_huffman(<< 101, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00101:5 >>);
-enc_huffman(<< 102, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100101:6 >>);
-enc_huffman(<< 103, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100110:6 >>);
-enc_huffman(<< 104, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100111:6 >>);
-enc_huffman(<< 105, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00110:5 >>);
-enc_huffman(<< 106, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110100:7 >>);
-enc_huffman(<< 107, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110101:7 >>);
-enc_huffman(<< 108, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101000:6 >>);
-enc_huffman(<< 109, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101001:6 >>);
-enc_huffman(<< 110, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101010:6 >>);
-enc_huffman(<< 111, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00111:5 >>);
-enc_huffman(<< 112, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101011:6 >>);
-enc_huffman(<< 113, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110110:7 >>);
-enc_huffman(<< 114, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101100:6 >>);
-enc_huffman(<< 115, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#01000:5 >>);
-enc_huffman(<< 116, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#01001:5 >>);
-enc_huffman(<< 117, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101101:6 >>);
-enc_huffman(<< 118, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110111:7 >>);
-enc_huffman(<< 119, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111000:7 >>);
-enc_huffman(<< 120, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111001:7 >>);
-enc_huffman(<< 121, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111010:7 >>);
-enc_huffman(<< 122, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111011:7 >>);
-enc_huffman(<< 123, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111110:15 >>);
-enc_huffman(<< 124, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111100:11 >>);
-enc_huffman(<< 125, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111101:14 >>);
-enc_huffman(<< 126, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111101:13 >>);
-enc_huffman(<< 127, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111100:28 >>);
-enc_huffman(<< 128, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111100110:20 >>);
-enc_huffman(<< 129, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010010:22 >>);
-enc_huffman(<< 130, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111100111:20 >>);
-enc_huffman(<< 131, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101000:20 >>);
-enc_huffman(<< 132, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010011:22 >>);
-enc_huffman(<< 133, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010100:22 >>);
-enc_huffman(<< 134, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010101:22 >>);
-enc_huffman(<< 135, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011001:23 >>);
-enc_huffman(<< 136, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010110:22 >>);
-enc_huffman(<< 137, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011010:23 >>);
-enc_huffman(<< 138, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011011:23 >>);
-enc_huffman(<< 139, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011100:23 >>);
-enc_huffman(<< 140, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011101:23 >>);
-enc_huffman(<< 141, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011110:23 >>);
-enc_huffman(<< 142, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101011:24 >>);
-enc_huffman(<< 143, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011111:23 >>);
-enc_huffman(<< 144, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101100:24 >>);
-enc_huffman(<< 145, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101101:24 >>);
-enc_huffman(<< 146, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010111:22 >>);
-enc_huffman(<< 147, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100000:23 >>);
-enc_huffman(<< 148, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101110:24 >>);
-enc_huffman(<< 149, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100001:23 >>);
-enc_huffman(<< 150, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100010:23 >>);
-enc_huffman(<< 151, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100011:23 >>);
-enc_huffman(<< 152, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100100:23 >>);
-enc_huffman(<< 153, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011100:21 >>);
-enc_huffman(<< 154, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011000:22 >>);
-enc_huffman(<< 155, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100101:23 >>);
-enc_huffman(<< 156, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011001:22 >>);
-enc_huffman(<< 157, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100110:23 >>);
-enc_huffman(<< 158, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100111:23 >>);
-enc_huffman(<< 159, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101111:24 >>);
-enc_huffman(<< 160, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011010:22 >>);
-enc_huffman(<< 161, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011101:21 >>);
-enc_huffman(<< 162, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101001:20 >>);
-enc_huffman(<< 163, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011011:22 >>);
-enc_huffman(<< 164, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011100:22 >>);
-enc_huffman(<< 165, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101000:23 >>);
-enc_huffman(<< 166, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101001:23 >>);
-enc_huffman(<< 167, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011110:21 >>);
-enc_huffman(<< 168, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101010:23 >>);
-enc_huffman(<< 169, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011101:22 >>);
-enc_huffman(<< 170, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011110:22 >>);
-enc_huffman(<< 171, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110000:24 >>);
-enc_huffman(<< 172, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011111:21 >>);
-enc_huffman(<< 173, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011111:22 >>);
-enc_huffman(<< 174, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101011:23 >>);
-enc_huffman(<< 175, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101100:23 >>);
-enc_huffman(<< 176, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100000:21 >>);
-enc_huffman(<< 177, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100001:21 >>);
-enc_huffman(<< 178, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100000:22 >>);
-enc_huffman(<< 179, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100010:21 >>);
-enc_huffman(<< 180, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101101:23 >>);
-enc_huffman(<< 181, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100001:22 >>);
-enc_huffman(<< 182, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101110:23 >>);
-enc_huffman(<< 183, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101111:23 >>);
-enc_huffman(<< 184, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101010:20 >>);
-enc_huffman(<< 185, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100010:22 >>);
-enc_huffman(<< 186, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100011:22 >>);
-enc_huffman(<< 187, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100100:22 >>);
-enc_huffman(<< 188, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110000:23 >>);
-enc_huffman(<< 189, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100101:22 >>);
-enc_huffman(<< 190, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100110:22 >>);
-enc_huffman(<< 191, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110001:23 >>);
-enc_huffman(<< 192, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100000:26 >>);
-enc_huffman(<< 193, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100001:26 >>);
-enc_huffman(<< 194, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101011:20 >>);
-enc_huffman(<< 195, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111110001:19 >>);
-enc_huffman(<< 196, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100111:22 >>);
-enc_huffman(<< 197, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110010:23 >>);
-enc_huffman(<< 198, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101000:22 >>);
-enc_huffman(<< 199, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101100:25 >>);
-enc_huffman(<< 200, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100010:26 >>);
-enc_huffman(<< 201, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100011:26 >>);
-enc_huffman(<< 202, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100100:26 >>);
-enc_huffman(<< 203, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111011110:27 >>);
-enc_huffman(<< 204, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111011111:27 >>);
-enc_huffman(<< 205, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100101:26 >>);
-enc_huffman(<< 206, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110001:24 >>);
-enc_huffman(<< 207, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101101:25 >>);
-enc_huffman(<< 208, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111110010:19 >>);
-enc_huffman(<< 209, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100011:21 >>);
-enc_huffman(<< 210, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100110:26 >>);
-enc_huffman(<< 211, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100000:27 >>);
-enc_huffman(<< 212, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100001:27 >>);
-enc_huffman(<< 213, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100111:26 >>);
-enc_huffman(<< 214, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100010:27 >>);
-enc_huffman(<< 215, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110010:24 >>);
-enc_huffman(<< 216, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100100:21 >>);
-enc_huffman(<< 217, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100101:21 >>);
-enc_huffman(<< 218, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101000:26 >>);
-enc_huffman(<< 219, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101001:26 >>);
-enc_huffman(<< 220, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111101:28 >>);
-enc_huffman(<< 221, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100011:27 >>);
-enc_huffman(<< 222, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100100:27 >>);
-enc_huffman(<< 223, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100101:27 >>);
-enc_huffman(<< 224, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101100:20 >>);
-enc_huffman(<< 225, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110011:24 >>);
-enc_huffman(<< 226, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101101:20 >>);
-enc_huffman(<< 227, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100110:21 >>);
-enc_huffman(<< 228, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101001:22 >>);
-enc_huffman(<< 229, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100111:21 >>);
-enc_huffman(<< 230, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111101000:21 >>);
-enc_huffman(<< 231, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110011:23 >>);
-enc_huffman(<< 232, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101010:22 >>);
-enc_huffman(<< 233, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101011:22 >>);
-enc_huffman(<< 234, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101110:25 >>);
-enc_huffman(<< 235, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101111:25 >>);
-enc_huffman(<< 236, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110100:24 >>);
-enc_huffman(<< 237, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110101:24 >>);
-enc_huffman(<< 238, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101010:26 >>);
-enc_huffman(<< 239, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110100:23 >>);
-enc_huffman(<< 240, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101011:26 >>);
-enc_huffman(<< 241, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100110:27 >>);
-enc_huffman(<< 242, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101100:26 >>);
-enc_huffman(<< 243, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101101:26 >>);
-enc_huffman(<< 244, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100111:27 >>);
-enc_huffman(<< 245, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101000:27 >>);
-enc_huffman(<< 246, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101001:27 >>);
-enc_huffman(<< 247, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101010:27 >>);
-enc_huffman(<< 248, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101011:27 >>);
-enc_huffman(<< 249, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111110:28 >>);
-enc_huffman(<< 250, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101100:27 >>);
-enc_huffman(<< 251, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101101:27 >>);
-enc_huffman(<< 252, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101110:27 >>);
-enc_huffman(<< 253, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101111:27 >>);
-enc_huffman(<< 254, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111110000:27 >>);
-enc_huffman(<< 255, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101110:26 >>).
-
-ifdef(TEST).
req_encode_test() ->
%% First request (raw then huffman).
diff --git a/src/cow_hpack_common.hrl b/src/cow_hpack_common.hrl
new file mode 100644
index 0000000..92f9514
--- /dev/null
+++ b/src/cow_hpack_common.hrl
@@ -0,0 +1,376 @@
+%% Copyright (c) 2015-2020, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% The prefixed integer and the string formats are common
+%% to both HPACK and QPACK. They are included directly in
+%% each module in order to avoid fully-qualified calls and
+%% slightly improve performance.
+%%
+%% Some functions are only used in one or the other even
+%% though the format is the same. In that case the functions
+%% can be found in the relevant module.
+%%
+%% Any tests relevant to these functions should be added to
+%% cow_hpack, since HPACK is where these formats originate.
+
+%% Prefix decoding.
+%%
+%% The HPACK format has 4 different integer prefix lengths (from 4 to 7)
+%% and each can be used to create an indefinite-length integer if all bits
+%% of the prefix are set to 1.
+
+dec_int5(<<2#11111:5, Rest/bits>>) ->
+ dec_big_int(Rest, 31, 0);
+dec_int5(<<Int:5, Rest/bits>>) ->
+ {Int, Rest}.
+
+dec_big_int(<<0:1, Value:7, Rest/bits>>, Int, M) ->
+ {Int + (Value bsl M), Rest};
+dec_big_int(<<1:1, Value:7, Rest/bits>>, Int, M) ->
+ dec_big_int(Rest, Int + (Value bsl M), M + 7).
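+
+%% For example (RFC 7541, Appendix C.1.2), the value 1337 encoded
+%% with a 5-bit prefix spans three bytes: 2#11111 in the prefix,
+%% then 2#10011010 and 2#00001010. dec_int5/1 therefore returns
+%% 31 + 26 + (10 bsl 7) = 1337.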
+
+%% Prefix encoding.
+
+enc_int5(Int, Prefix) when Int < 31 ->
+ <<Prefix:3, Int:5>>;
+enc_int5(Int, Prefix) ->
+ enc_big_int(Int - 31, <<Prefix:3, 2#11111:5>>).
+
+enc_int6(Int, Prefix) when Int < 63 ->
+ <<Prefix:2, Int:6>>;
+enc_int6(Int, Prefix) ->
+ enc_big_int(Int - 63, <<Prefix:2, 2#111111:6>>).
+
+enc_int7(Int, Prefix) when Int < 127 ->
+ <<Prefix:1, Int:7>>;
+enc_int7(Int, Prefix) ->
+ enc_big_int(Int - 127, <<Prefix:1, 2#1111111:7>>).
+
+enc_big_int(Int, Acc) when Int < 128 ->
+ <<Acc/binary, Int:8>>;
+enc_big_int(Int, Acc) ->
+ enc_big_int(Int bsr 7, <<Acc/binary, 1:1, Int:7>>).
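+
+%% Conversely, enc_int5(1337, 2#000) emits those same three bytes:
+%% 1337 - 31 = 1306 is output as 7-bit groups, least significant
+%% first, with the high bit of each byte flagging continuation.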
+
+%% String decoding.
+%%
+%% We use a lookup table that allows us to benefit from
+%% the binary match context optimization. A more naive
+%% implementation using bit pattern matching cannot reuse
+%% a match context because it wouldn't always match on
+%% byte boundaries.
+%%
+%% See cow_hpack_dec_huffman_lookup.hrl for more details.
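+%%
+%% The decoder below therefore consumes the input one nibble
+%% (4 bits) at a time, emitting at most one character per nibble
+%% and threading the intermediate decoder state through the lookup.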
+
+dec_huffman(<<A:4, B:4, R/bits>>, Len, Huff0, Acc) when Len > 1 ->
+ {_, CharA, Huff1} = dec_huffman_lookup(Huff0, A),
+ {_, CharB, Huff} = dec_huffman_lookup(Huff1, B),
+ case {CharA, CharB} of
+ {undefined, undefined} -> dec_huffman(R, Len - 1, Huff, Acc);
+ {CharA, undefined} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharA>>);
+ {undefined, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharB>>);
+ {CharA, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/binary, CharA, CharB>>)
+ end;
+dec_huffman(<<A:4, B:4, Rest/bits>>, 1, Huff0, Acc) ->
+ {_, CharA, Huff} = dec_huffman_lookup(Huff0, A),
+ {ok, CharB, _} = dec_huffman_lookup(Huff, B),
+ case {CharA, CharB} of
+ %% {undefined, undefined} (> 7-bit final padding) is rejected with a crash.
+ {CharA, undefined} ->
+ {<<Acc/binary, CharA>>, Rest};
+ {undefined, CharB} ->
+ {<<Acc/binary, CharB>>, Rest};
+ _ ->
+ {<<Acc/binary, CharA, CharB>>, Rest}
+ end;
+%% Can only be reached when the string length to decode is 0.
+dec_huffman(Rest, 0, _, <<>>) ->
+ {<<>>, Rest}.
+
+-include("cow_hpack_dec_huffman_lookup.hrl").
+
+%% String encoding.
+
+enc_str(Str, huffman) ->
+ Str2 = enc_huffman(Str, <<>>),
+ [enc_int7(byte_size(Str2), 2#1)|Str2];
+enc_str(Str, no_huffman) ->
+ [enc_int7(byte_size(Str), 2#0)|Str].
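+
+%% As an example, RFC 7541 (Appendix C.4.1) Huffman-encodes the string
+%% "www.example.com" into the 12 bytes 16#f1e3c2e5f23a6ba0ab90f4ff, so
+%% enc_str/2 emits the length byte 16#8c (Huffman flag set, length 12)
+%% followed by those 12 bytes, saving three bytes over the raw form.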
+
+enc_huffman(<<>>, Acc) ->
+ case bit_size(Acc) rem 8 of
+ 1 -> <<Acc/bits, 2#1111111:7>>;
+ 2 -> <<Acc/bits, 2#111111:6>>;
+ 3 -> <<Acc/bits, 2#11111:5>>;
+ 4 -> <<Acc/bits, 2#1111:4>>;
+ 5 -> <<Acc/bits, 2#111:3>>;
+ 6 -> <<Acc/bits, 2#11:2>>;
+ 7 -> <<Acc/bits, 2#1:1>>;
+ 0 -> Acc
+ end;
+enc_huffman(<<0, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111000:13>>);
+enc_huffman(<<1, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011000:23>>);
+enc_huffman(<<2, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100010:28>>);
+enc_huffman(<<3, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100011:28>>);
+enc_huffman(<<4, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100100:28>>);
+enc_huffman(<<5, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100101:28>>);
+enc_huffman(<<6, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100110:28>>);
+enc_huffman(<<7, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111100111:28>>);
+enc_huffman(<<8, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101000:28>>);
+enc_huffman(<<9, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101010:24>>);
+enc_huffman(<<10, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111111111100:30>>);
+enc_huffman(<<11, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101001:28>>);
+enc_huffman(<<12, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101010:28>>);
+enc_huffman(<<13, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111111111101:30>>);
+enc_huffman(<<14, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101011:28>>);
+enc_huffman(<<15, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101100:28>>);
+enc_huffman(<<16, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101101:28>>);
+enc_huffman(<<17, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101110:28>>);
+enc_huffman(<<18, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111101111:28>>);
+enc_huffman(<<19, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110000:28>>);
+enc_huffman(<<20, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110001:28>>);
+enc_huffman(<<21, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110010:28>>);
+enc_huffman(<<22, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111111111110:30>>);
+enc_huffman(<<23, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110011:28>>);
+enc_huffman(<<24, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110100:28>>);
+enc_huffman(<<25, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110101:28>>);
+enc_huffman(<<26, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110110:28>>);
+enc_huffman(<<27, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111110111:28>>);
+enc_huffman(<<28, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111000:28>>);
+enc_huffman(<<29, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111001:28>>);
+enc_huffman(<<30, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111010:28>>);
+enc_huffman(<<31, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111011:28>>);
+enc_huffman(<<32, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#010100:6>>);
+enc_huffman(<<33, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111000:10>>);
+enc_huffman(<<34, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111001:10>>);
+enc_huffman(<<35, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111010:12>>);
+enc_huffman(<<36, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111001:13>>);
+enc_huffman(<<37, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#010101:6>>);
+enc_huffman(<<38, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111000:8>>);
+enc_huffman(<<39, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111010:11>>);
+enc_huffman(<<40, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111010:10>>);
+enc_huffman(<<41, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111011:10>>);
+enc_huffman(<<42, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111001:8>>);
+enc_huffman(<<43, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111011:11>>);
+enc_huffman(<<44, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111010:8>>);
+enc_huffman(<<45, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#010110:6>>);
+enc_huffman(<<46, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#010111:6>>);
+enc_huffman(<<47, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011000:6>>);
+enc_huffman(<<48, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00000:5>>);
+enc_huffman(<<49, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00001:5>>);
+enc_huffman(<<50, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00010:5>>);
+enc_huffman(<<51, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011001:6>>);
+enc_huffman(<<52, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011010:6>>);
+enc_huffman(<<53, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011011:6>>);
+enc_huffman(<<54, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011100:6>>);
+enc_huffman(<<55, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011101:6>>);
+enc_huffman(<<56, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011110:6>>);
+enc_huffman(<<57, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#011111:6>>);
+enc_huffman(<<58, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1011100:7>>);
+enc_huffman(<<59, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111011:8>>);
+enc_huffman(<<60, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111100:15>>);
+enc_huffman(<<61, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100000:6>>);
+enc_huffman(<<62, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111011:12>>);
+enc_huffman(<<63, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111100:10>>);
+enc_huffman(<<64, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111010:13>>);
+enc_huffman(<<65, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100001:6>>);
+enc_huffman(<<66, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1011101:7>>);
+enc_huffman(<<67, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1011110:7>>);
+enc_huffman(<<68, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1011111:7>>);
+enc_huffman(<<69, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100000:7>>);
+enc_huffman(<<70, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100001:7>>);
+enc_huffman(<<71, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100010:7>>);
+enc_huffman(<<72, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100011:7>>);
+enc_huffman(<<73, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100100:7>>);
+enc_huffman(<<74, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100101:7>>);
+enc_huffman(<<75, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100110:7>>);
+enc_huffman(<<76, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1100111:7>>);
+enc_huffman(<<77, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101000:7>>);
+enc_huffman(<<78, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101001:7>>);
+enc_huffman(<<79, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101010:7>>);
+enc_huffman(<<80, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101011:7>>);
+enc_huffman(<<81, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101100:7>>);
+enc_huffman(<<82, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101101:7>>);
+enc_huffman(<<83, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101110:7>>);
+enc_huffman(<<84, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1101111:7>>);
+enc_huffman(<<85, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110000:7>>);
+enc_huffman(<<86, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110001:7>>);
+enc_huffman(<<87, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110010:7>>);
+enc_huffman(<<88, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111100:8>>);
+enc_huffman(<<89, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110011:7>>);
+enc_huffman(<<90, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111101:8>>);
+enc_huffman(<<91, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111011:13>>);
+enc_huffman(<<92, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111110000:19>>);
+enc_huffman(<<93, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111100:13>>);
+enc_huffman(<<94, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111100:14>>);
+enc_huffman(<<95, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100010:6>>);
+enc_huffman(<<96, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111101:15>>);
+enc_huffman(<<97, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00011:5>>);
+enc_huffman(<<98, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100011:6>>);
+enc_huffman(<<99, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00100:5>>);
+enc_huffman(<<100, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100100:6>>);
+enc_huffman(<<101, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00101:5>>);
+enc_huffman(<<102, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100101:6>>);
+enc_huffman(<<103, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100110:6>>);
+enc_huffman(<<104, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#100111:6>>);
+enc_huffman(<<105, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00110:5>>);
+enc_huffman(<<106, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110100:7>>);
+enc_huffman(<<107, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110101:7>>);
+enc_huffman(<<108, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101000:6>>);
+enc_huffman(<<109, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101001:6>>);
+enc_huffman(<<110, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101010:6>>);
+enc_huffman(<<111, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#00111:5>>);
+enc_huffman(<<112, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101011:6>>);
+enc_huffman(<<113, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110110:7>>);
+enc_huffman(<<114, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101100:6>>);
+enc_huffman(<<115, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#01000:5>>);
+enc_huffman(<<116, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#01001:5>>);
+enc_huffman(<<117, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#101101:6>>);
+enc_huffman(<<118, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1110111:7>>);
+enc_huffman(<<119, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111000:7>>);
+enc_huffman(<<120, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111001:7>>);
+enc_huffman(<<121, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111010:7>>);
+enc_huffman(<<122, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111011:7>>);
+enc_huffman(<<123, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111110:15>>);
+enc_huffman(<<124, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111100:11>>);
+enc_huffman(<<125, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111101:14>>);
+enc_huffman(<<126, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111101:13>>);
+enc_huffman(<<127, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111100:28>>);
+enc_huffman(<<128, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111100110:20>>);
+enc_huffman(<<129, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010010:22>>);
+enc_huffman(<<130, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111100111:20>>);
+enc_huffman(<<131, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101000:20>>);
+enc_huffman(<<132, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010011:22>>);
+enc_huffman(<<133, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010100:22>>);
+enc_huffman(<<134, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010101:22>>);
+enc_huffman(<<135, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011001:23>>);
+enc_huffman(<<136, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010110:22>>);
+enc_huffman(<<137, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011010:23>>);
+enc_huffman(<<138, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011011:23>>);
+enc_huffman(<<139, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011100:23>>);
+enc_huffman(<<140, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011101:23>>);
+enc_huffman(<<141, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011110:23>>);
+enc_huffman(<<142, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101011:24>>);
+enc_huffman(<<143, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111011111:23>>);
+enc_huffman(<<144, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101100:24>>);
+enc_huffman(<<145, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101101:24>>);
+enc_huffman(<<146, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111010111:22>>);
+enc_huffman(<<147, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100000:23>>);
+enc_huffman(<<148, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101110:24>>);
+enc_huffman(<<149, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100001:23>>);
+enc_huffman(<<150, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100010:23>>);
+enc_huffman(<<151, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100011:23>>);
+enc_huffman(<<152, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100100:23>>);
+enc_huffman(<<153, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111011100:21>>);
+enc_huffman(<<154, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011000:22>>);
+enc_huffman(<<155, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100101:23>>);
+enc_huffman(<<156, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011001:22>>);
+enc_huffman(<<157, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100110:23>>);
+enc_huffman(<<158, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111100111:23>>);
+enc_huffman(<<159, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111101111:24>>);
+enc_huffman(<<160, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011010:22>>);
+enc_huffman(<<161, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111011101:21>>);
+enc_huffman(<<162, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101001:20>>);
+enc_huffman(<<163, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011011:22>>);
+enc_huffman(<<164, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011100:22>>);
+enc_huffman(<<165, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101000:23>>);
+enc_huffman(<<166, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101001:23>>);
+enc_huffman(<<167, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111011110:21>>);
+enc_huffman(<<168, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101010:23>>);
+enc_huffman(<<169, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011101:22>>);
+enc_huffman(<<170, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011110:22>>);
+enc_huffman(<<171, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110000:24>>);
+enc_huffman(<<172, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111011111:21>>);
+enc_huffman(<<173, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111011111:22>>);
+enc_huffman(<<174, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101011:23>>);
+enc_huffman(<<175, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101100:23>>);
+enc_huffman(<<176, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100000:21>>);
+enc_huffman(<<177, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100001:21>>);
+enc_huffman(<<178, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100000:22>>);
+enc_huffman(<<179, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100010:21>>);
+enc_huffman(<<180, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101101:23>>);
+enc_huffman(<<181, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100001:22>>);
+enc_huffman(<<182, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101110:23>>);
+enc_huffman(<<183, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111101111:23>>);
+enc_huffman(<<184, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101010:20>>);
+enc_huffman(<<185, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100010:22>>);
+enc_huffman(<<186, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100011:22>>);
+enc_huffman(<<187, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100100:22>>);
+enc_huffman(<<188, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110000:23>>);
+enc_huffman(<<189, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100101:22>>);
+enc_huffman(<<190, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100110:22>>);
+enc_huffman(<<191, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110001:23>>);
+enc_huffman(<<192, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100000:26>>);
+enc_huffman(<<193, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100001:26>>);
+enc_huffman(<<194, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101011:20>>);
+enc_huffman(<<195, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111110001:19>>);
+enc_huffman(<<196, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111100111:22>>);
+enc_huffman(<<197, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110010:23>>);
+enc_huffman(<<198, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111101000:22>>);
+enc_huffman(<<199, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111101100:25>>);
+enc_huffman(<<200, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100010:26>>);
+enc_huffman(<<201, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100011:26>>);
+enc_huffman(<<202, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100100:26>>);
+enc_huffman(<<203, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111011110:27>>);
+enc_huffman(<<204, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111011111:27>>);
+enc_huffman(<<205, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100101:26>>);
+enc_huffman(<<206, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110001:24>>);
+enc_huffman(<<207, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111101101:25>>);
+enc_huffman(<<208, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111110010:19>>);
+enc_huffman(<<209, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100011:21>>);
+enc_huffman(<<210, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100110:26>>);
+enc_huffman(<<211, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100000:27>>);
+enc_huffman(<<212, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100001:27>>);
+enc_huffman(<<213, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111100111:26>>);
+enc_huffman(<<214, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100010:27>>);
+enc_huffman(<<215, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110010:24>>);
+enc_huffman(<<216, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100100:21>>);
+enc_huffman(<<217, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100101:21>>);
+enc_huffman(<<218, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101000:26>>);
+enc_huffman(<<219, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101001:26>>);
+enc_huffman(<<220, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111101:28>>);
+enc_huffman(<<221, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100011:27>>);
+enc_huffman(<<222, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100100:27>>);
+enc_huffman(<<223, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100101:27>>);
+enc_huffman(<<224, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101100:20>>);
+enc_huffman(<<225, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110011:24>>);
+enc_huffman(<<226, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111101101:20>>);
+enc_huffman(<<227, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100110:21>>);
+enc_huffman(<<228, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111101001:22>>);
+enc_huffman(<<229, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111100111:21>>);
+enc_huffman(<<230, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111101000:21>>);
+enc_huffman(<<231, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110011:23>>);
+enc_huffman(<<232, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111101010:22>>);
+enc_huffman(<<233, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111101011:22>>);
+enc_huffman(<<234, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111101110:25>>);
+enc_huffman(<<235, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111101111:25>>);
+enc_huffman(<<236, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110100:24>>);
+enc_huffman(<<237, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111110101:24>>);
+enc_huffman(<<238, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101010:26>>);
+enc_huffman(<<239, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111110100:23>>);
+enc_huffman(<<240, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101011:26>>);
+enc_huffman(<<241, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100110:27>>);
+enc_huffman(<<242, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101100:26>>);
+enc_huffman(<<243, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101101:26>>);
+enc_huffman(<<244, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111100111:27>>);
+enc_huffman(<<245, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101000:27>>);
+enc_huffman(<<246, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101001:27>>);
+enc_huffman(<<247, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101010:27>>);
+enc_huffman(<<248, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101011:27>>);
+enc_huffman(<<249, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#1111111111111111111111111110:28>>);
+enc_huffman(<<250, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101100:27>>);
+enc_huffman(<<251, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101101:27>>);
+enc_huffman(<<252, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101110:27>>);
+enc_huffman(<<253, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111101111:27>>);
+enc_huffman(<<254, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#111111111111111111111110000:27>>);
+enc_huffman(<<255, R/bits>>, A) -> enc_huffman(R, <<A/bits, 2#11111111111111111111101110:26>>).
diff --git a/src/cow_hpack_dec_huffman_lookup.hrl b/src/cow_hpack_dec_huffman_lookup.hrl
index 5ed4d39..6e5da31 100644
--- a/src/cow_hpack_dec_huffman_lookup.hrl
+++ b/src/cow_hpack_dec_huffman_lookup.hrl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2019, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2019-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_http.erl b/src/cow_http.erl
index bfaace3..b4bc672 100644
--- a/src/cow_http.erl
+++ b/src/cow_http.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2013-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2013-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
@@ -12,23 +12,35 @@
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+%% This module contains functions and types common
+%% to all or most HTTP versions.
-module(cow_http).
+%% The HTTP/1 functions have been moved to cow_http1.
+%% In order to remain backward compatible, we redirect
+%% calls to cow_http1. The type version() was moved
+%% and no fallback is provided.
+%%
+%% @todo Remove the aliases in Cowlib 3.0.
-export([parse_request_line/1]).
-export([parse_status_line/1]).
-export([status_to_integer/1]).
-export([parse_headers/1]).
-
-export([parse_fullpath/1]).
-export([parse_version/1]).
-
-export([request/4]).
-export([response/3]).
-export([headers/1]).
-export([version/1]).
--type version() :: 'HTTP/1.0' | 'HTTP/1.1'.
--export_type([version/0]).
+%% Functions used by HTTP/2+.
+
+-export([format_semantic_error/1]).
+-export([merge_pseudo_headers/2]).
+-export([process_headers/5]).
+-export([remove_http1_headers/1]).
+
+%% Types used by all versions of HTTP.
-type status() :: 100..999.
-export_type([status/0]).
@@ -36,391 +48,324 @@
-type headers() :: [{binary(), iodata()}].
-export_type([headers/0]).
--include("cow_inline.hrl").
-
-%% @doc Parse the request line.
-
--spec parse_request_line(binary()) -> {binary(), binary(), version(), binary()}.
-parse_request_line(Data) ->
- {Pos, _} = binary:match(Data, <<"\r">>),
- <<RequestLine:Pos/binary, "\r\n", Rest/bits>> = Data,
- [Method, Target, Version0] = binary:split(RequestLine, <<$\s>>, [trim_all, global]),
- Version = case Version0 of
- <<"HTTP/1.1">> -> 'HTTP/1.1';
- <<"HTTP/1.0">> -> 'HTTP/1.0'
- end,
- {Method, Target, Version, Rest}.
-
--ifdef(TEST).
-parse_request_line_test_() ->
- Tests = [
- {<<"GET /path HTTP/1.0\r\nRest">>,
- {<<"GET">>, <<"/path">>, 'HTTP/1.0', <<"Rest">>}},
- {<<"GET /path HTTP/1.1\r\nRest">>,
- {<<"GET">>, <<"/path">>, 'HTTP/1.1', <<"Rest">>}},
- {<<"CONNECT proxy.example.org:1080 HTTP/1.1\r\nRest">>,
- {<<"CONNECT">>, <<"proxy.example.org:1080">>, 'HTTP/1.1', <<"Rest">>}}
- ],
- [{V, fun() -> R = parse_request_line(V) end}
- || {V, R} <- Tests].
-
-parse_request_line_error_test_() ->
- Tests = [
- <<>>,
- <<"GET">>,
- <<"GET /path\r\n">>,
- <<"GET /path HTTP/1.1">>,
- <<"GET /path HTTP/1.1\r">>,
- <<"GET /path HTTP/1.1\n">>,
- <<"GET /path HTTP/0.9\r\n">>,
- <<"content-type: text/plain\r\n">>,
- <<0:80, "\r\n">>
- ],
- [{V, fun() -> {'EXIT', _} = (catch parse_request_line(V)) end}
- || V <- Tests].
-
-horse_parse_request_line_get_path() ->
- horse:repeat(200000,
- parse_request_line(<<"GET /path HTTP/1.1\r\n">>)
- ).
--endif.
-
-%% @doc Parse the status line.
-
--spec parse_status_line(binary()) -> {version(), status(), binary(), binary()}.
-parse_status_line(<< "HTTP/1.1 200 OK\r\n", Rest/bits >>) ->
- {'HTTP/1.1', 200, <<"OK">>, Rest};
-parse_status_line(<< "HTTP/1.1 404 Not Found\r\n", Rest/bits >>) ->
- {'HTTP/1.1', 404, <<"Not Found">>, Rest};
-parse_status_line(<< "HTTP/1.1 500 Internal Server Error\r\n", Rest/bits >>) ->
- {'HTTP/1.1', 500, <<"Internal Server Error">>, Rest};
-parse_status_line(<< "HTTP/1.1 ", Status/bits >>) ->
- parse_status_line(Status, 'HTTP/1.1');
-parse_status_line(<< "HTTP/1.0 ", Status/bits >>) ->
- parse_status_line(Status, 'HTTP/1.0').
-
-parse_status_line(<<H, T, U, " ", Rest/bits>>, Version) ->
- Status = status_to_integer(H, T, U),
- {Pos, _} = binary:match(Rest, <<"\r">>),
- << StatusStr:Pos/binary, "\r\n", Rest2/bits >> = Rest,
- {Version, Status, StatusStr, Rest2}.
+%% Types used by HTTP/2+.
+
+-type pseudo_headers() :: #{} %% Trailers
+ | #{ %% Responses.
+ status := cow_http:status()
+ } | #{ %% Normal CONNECT requests.
+ method := binary(),
+ authority := binary()
+ } | #{ %% Extended CONNECT requests.
+ method := binary(),
+ scheme := binary(),
+ authority := binary(),
+ path := binary(),
+ protocol := binary()
+ } | #{ %% Other requests.
+ method := binary(),
+ scheme := binary(),
+ authority => binary(),
+ path := binary()
+ }.
+-export_type([pseudo_headers/0]).
+
+-type fin() :: fin | nofin.
+-export_type([fin/0]).
+
+%% HTTP/1 function aliases.
+
+-spec parse_request_line(binary()) -> {binary(), binary(), cow_http1:version(), binary()}.
+parse_request_line(Data) -> cow_http1:parse_request_line(Data).
+
+-spec parse_status_line(binary()) -> {cow_http1:version(), status(), binary(), binary()}.
+parse_status_line(Data) -> cow_http1:parse_status_line(Data).
-spec status_to_integer(status() | binary()) -> status().
-status_to_integer(Status) when is_integer(Status) ->
- Status;
-status_to_integer(Status) ->
- case Status of
- <<H, T, U>> ->
- status_to_integer(H, T, U);
- <<H, T, U, " ", _/bits>> ->
- status_to_integer(H, T, U)
- end.
-
-status_to_integer(H, T, U)
- when $0 =< H, H =< $9, $0 =< T, T =< $9, $0 =< U, U =< $9 ->
- (H - $0) * 100 + (T - $0) * 10 + (U - $0).
-
--ifdef(TEST).
-parse_status_line_test_() ->
- Tests = [
- {<<"HTTP/1.1 200 OK\r\nRest">>,
- {'HTTP/1.1', 200, <<"OK">>, <<"Rest">>}},
- {<<"HTTP/1.0 404 Not Found\r\nRest">>,
- {'HTTP/1.0', 404, <<"Not Found">>, <<"Rest">>}},
- {<<"HTTP/1.1 500 Something very funny here\r\nRest">>,
- {'HTTP/1.1', 500, <<"Something very funny here">>, <<"Rest">>}},
- {<<"HTTP/1.1 200 \r\nRest">>,
- {'HTTP/1.1', 200, <<>>, <<"Rest">>}}
- ],
- [{V, fun() -> R = parse_status_line(V) end}
- || {V, R} <- Tests].
-
-parse_status_line_error_test_() ->
- Tests = [
- <<>>,
- <<"HTTP/1.1">>,
- <<"HTTP/1.1 200\r\n">>,
- <<"HTTP/1.1 200 OK">>,
- <<"HTTP/1.1 200 OK\r">>,
- <<"HTTP/1.1 200 OK\n">>,
- <<"HTTP/0.9 200 OK\r\n">>,
- <<"HTTP/1.1 42 Answer\r\n">>,
- <<"HTTP/1.1 999999999 More than OK\r\n">>,
- <<"content-type: text/plain\r\n">>,
- <<0:80, "\r\n">>
- ],
- [{V, fun() -> {'EXIT', _} = (catch parse_status_line(V)) end}
- || V <- Tests].
-
-horse_parse_status_line_200() ->
- horse:repeat(200000,
- parse_status_line(<<"HTTP/1.1 200 OK\r\n">>)
- ).
+status_to_integer(Status) -> cow_http1:status_to_integer(Status).
-horse_parse_status_line_404() ->
- horse:repeat(200000,
- parse_status_line(<<"HTTP/1.1 404 Not Found\r\n">>)
- ).
+-spec parse_headers(binary()) -> {[{binary(), binary()}], binary()}.
+parse_headers(Data) -> cow_http1:parse_headers(Data).
-horse_parse_status_line_500() ->
- horse:repeat(200000,
- parse_status_line(<<"HTTP/1.1 500 Internal Server Error\r\n">>)
- ).
+-spec parse_fullpath(binary()) -> {binary(), binary()}.
+parse_fullpath(Fullpath) -> cow_http1:parse_fullpath(Fullpath).
-horse_parse_status_line_other() ->
- horse:repeat(200000,
- parse_status_line(<<"HTTP/1.1 416 Requested range not satisfiable\r\n">>)
- ).
--endif.
+-spec parse_version(binary()) -> cow_http1:version().
+parse_version(Data) -> cow_http1:parse_version(Data).
-%% @doc Parse the list of headers.
+-spec request(binary(), iodata(), cow_http1:version(), headers()) -> iodata().
+request(Method, Path, Version, Headers) -> cow_http1:request(Method, Path, Version, Headers).
--spec parse_headers(binary()) -> {[{binary(), binary()}], binary()}.
-parse_headers(Data) ->
- parse_header(Data, []).
-
-parse_header(<< $\r, $\n, Rest/bits >>, Acc) ->
- {lists:reverse(Acc), Rest};
-parse_header(Data, Acc) ->
- parse_hd_name(Data, Acc, <<>>).
-
-parse_hd_name(<< C, Rest/bits >>, Acc, SoFar) ->
- case C of
- $: -> parse_hd_before_value(Rest, Acc, SoFar);
- $\s -> parse_hd_name_ws(Rest, Acc, SoFar);
- $\t -> parse_hd_name_ws(Rest, Acc, SoFar);
- _ -> ?LOWER(parse_hd_name, Rest, Acc, SoFar)
- end.
+-spec response(status() | binary(), cow_http1:version(), headers()) -> iodata().
+response(Status, Version, Headers) -> cow_http1:response(Status, Version, Headers).
-parse_hd_name_ws(<< C, Rest/bits >>, Acc, Name) ->
- case C of
- $: -> parse_hd_before_value(Rest, Acc, Name);
- $\s -> parse_hd_name_ws(Rest, Acc, Name);
- $\t -> parse_hd_name_ws(Rest, Acc, Name)
+-spec headers(headers()) -> iodata().
+headers(Headers) -> cow_http1:headers(Headers).
+
+-spec version(cow_http1:version()) -> binary().
+version(Version) -> cow_http1:version(Version).
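+
+%% All of the aliases above simply delegate to cow_http1, e.g.
+%% cow_http:parse_version(<<"HTTP/1.0">>) =:= cow_http1:parse_version(<<"HTTP/1.0">>).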
+
+%% Functions used by HTTP/2+.
+
+%% Semantic errors are common to all HTTP versions.
+
+-spec format_semantic_error(atom()) -> atom().
+
+format_semantic_error(connect_invalid_content_length_2xx) ->
+	'Content-length header received in a 2xx response to a CONNECT request. (RFC7230 3.3.2)';
+format_semantic_error(invalid_content_length_header) ->
+ 'The content-length header is invalid. (RFC7230 3.3.2)';
+format_semantic_error(invalid_content_length_header_1xx) ->
+ 'Content-length header received in a 1xx response. (RFC7230 3.3.2)';
+format_semantic_error(invalid_content_length_header_204) ->
+ 'Content-length header received in a 204 response. (RFC7230 3.3.2)';
+format_semantic_error(multiple_content_length_headers) ->
+ 'Multiple content-length headers were received. (RFC7230 3.3.2)'.
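+
+%% The returned atoms double as human-readable error messages. For example,
+%% format_semantic_error(multiple_content_length_headers) returns
+%% 'Multiple content-length headers were received. (RFC7230 3.3.2)'.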
+
+%% Merge the pseudo-headers at the start of the headers list.
+
+-spec merge_pseudo_headers(pseudo_headers(), headers()) -> headers().
+
+merge_pseudo_headers(PseudoHeaders, Headers0) ->
+ lists:foldl(fun
+ ({status, Status}, Acc) when is_integer(Status) ->
+ [{<<":status">>, integer_to_binary(Status)}|Acc];
+ ({Name, Value}, Acc) ->
+ [{iolist_to_binary([$:, atom_to_binary(Name, latin1)]), Value}|Acc]
+ end, Headers0, maps:to_list(PseudoHeaders)).
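+
+-ifdef(TEST).
+%% Hypothetical usage sketch, not part of upstream Cowlib: pseudo-headers
+%% are prepended to the regular headers, with :status stringified.
+merge_pseudo_headers_example_test() ->
+	[{<<":status">>, <<"204">>}, {<<"content-type">>, <<"text/plain">>}]
+		= merge_pseudo_headers(#{status => 204},
+			[{<<"content-type">>, <<"text/plain">>}]),
+	ok.
+-endif.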
+
+%% Process HTTP/2+ headers. This is done after decoding them.
+
+-spec process_headers(headers(), request | push_promise | response | trailers,
+ binary() | undefined, fin(), #{enable_connect_protocol => boolean(), any() => any()})
+ -> {headers, headers(), pseudo_headers(), non_neg_integer() | undefined}
+ | {push_promise, headers(), pseudo_headers()}
+ | {trailers, headers()}
+ | {error, atom()}.
+
+process_headers(Headers0, Type, ReqMethod, IsFin, LocalSettings)
+ when Type =:= request; Type =:= push_promise ->
+ IsExtendedConnectEnabled = maps:get(enable_connect_protocol, LocalSettings, false),
+ case request_pseudo_headers(Headers0, #{}) of
+ %% Extended CONNECT method (HTTP/2: RFC8441, HTTP/3: RFC9220).
+ {ok, PseudoHeaders=#{method := <<"CONNECT">>, scheme := _,
+ authority := _, path := _, protocol := _}, Headers}
+ when IsExtendedConnectEnabled ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders);
+ {ok, #{method := <<"CONNECT">>, scheme := _,
+ authority := _, path := _}, _}
+ when IsExtendedConnectEnabled ->
+ {error, extended_connect_missing_protocol};
+ {ok, #{protocol := _}, _} ->
+ {error, invalid_protocol_pseudo_header};
+ %% Normal CONNECT (no scheme/path).
+ {ok, PseudoHeaders = #{method := <<"CONNECT">>, authority := _}, Headers}
+ when map_size(PseudoHeaders) =:= 2 ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders);
+ {ok, #{method := <<"CONNECT">>, authority := _}, _} ->
+ {error, connect_invalid_pseudo_header};
+ {ok, #{method := <<"CONNECT">>}, _} ->
+ {error, connect_missing_authority};
+ %% Other requests.
+ {ok, PseudoHeaders = #{method := _, scheme := _, path := _}, Headers} ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders);
+ {ok, _, _} ->
+ {error, missing_pseudo_header};
+ Error = {error, _} ->
+ Error
+ end;
+process_headers(Headers0, Type = response, ReqMethod, IsFin, _LocalSettings) ->
+ case response_pseudo_headers(Headers0, #{}) of
+ {ok, PseudoHeaders=#{status := _}, Headers} ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders);
+ {ok, _, _} ->
+ {error, missing_pseudo_header};
+ Error = {error, _} ->
+ Error
+ end;
+process_headers(Headers, Type = trailers, ReqMethod, IsFin, _LocalSettings) ->
+ case trailers_have_pseudo_headers(Headers) of
+ false ->
+ regular_headers(Headers, Type, ReqMethod, IsFin, #{});
+ true ->
+ {error, trailer_invalid_pseudo_header}
end.
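+
+-ifdef(TEST).
+%% Hypothetical usage sketch, not part of upstream Cowlib: a fin request
+%% without a content-length header has an expected body size of 0.
+process_headers_request_example_test() ->
+	{headers, [{<<"accept">>, <<"text/html">>}],
+		#{method := <<"GET">>, scheme := <<"https">>, path := <<"/">>}, 0}
+		= process_headers([
+			{<<":method">>, <<"GET">>},
+			{<<":scheme">>, <<"https">>},
+			{<<":path">>, <<"/">>},
+			{<<"accept">>, <<"text/html">>}
+		], request, undefined, fin, #{}),
+	ok.
+-endif.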
-parse_hd_before_value(<< $\s, Rest/bits >>, Acc, Name) ->
- parse_hd_before_value(Rest, Acc, Name);
-parse_hd_before_value(<< $\t, Rest/bits >>, Acc, Name) ->
- parse_hd_before_value(Rest, Acc, Name);
-parse_hd_before_value(Data, Acc, Name) ->
- parse_hd_value(Data, Acc, Name, <<>>).
-
-parse_hd_value(<< $\r, Rest/bits >>, Acc, Name, SoFar) ->
- case Rest of
- << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t ->
- parse_hd_value(Rest2, Acc, Name, << SoFar/binary, C >>);
- << $\n, Rest2/bits >> ->
- Value = clean_value_ws_end(SoFar, byte_size(SoFar) - 1),
- parse_header(Rest2, [{Name, Value}|Acc])
+request_pseudo_headers([{<<":method">>, _}|_], #{method := _}) ->
+ {error, multiple_method_pseudo_headers};
+request_pseudo_headers([{<<":method">>, Method}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{method => Method});
+request_pseudo_headers([{<<":scheme">>, _}|_], #{scheme := _}) ->
+ {error, multiple_scheme_pseudo_headers};
+request_pseudo_headers([{<<":scheme">>, Scheme}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{scheme => Scheme});
+request_pseudo_headers([{<<":authority">>, _}|_], #{authority := _}) ->
+ {error, multiple_authority_pseudo_headers};
+request_pseudo_headers([{<<":authority">>, Authority}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{authority => Authority});
+request_pseudo_headers([{<<":path">>, _}|_], #{path := _}) ->
+ {error, multiple_path_pseudo_headers};
+request_pseudo_headers([{<<":path">>, Path}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{path => Path});
+request_pseudo_headers([{<<":protocol">>, _}|_], #{protocol := _}) ->
+ {error, multiple_protocol_pseudo_headers};
+request_pseudo_headers([{<<":protocol">>, Protocol}|Tail], PseudoHeaders) ->
+ request_pseudo_headers(Tail, PseudoHeaders#{protocol => Protocol});
+request_pseudo_headers([{<<":", _/bits>>, _}|_], _) ->
+ {error, invalid_pseudo_header};
+request_pseudo_headers(Headers, PseudoHeaders) ->
+ {ok, PseudoHeaders, Headers}.
+
+response_pseudo_headers([{<<":status">>, _}|_], #{status := _}) ->
+ {error, multiple_status_pseudo_headers};
+response_pseudo_headers([{<<":status">>, Status}|Tail], PseudoHeaders) ->
+ try cow_http:status_to_integer(Status) of
+ IntStatus ->
+ response_pseudo_headers(Tail, PseudoHeaders#{status => IntStatus})
+ catch _:_ ->
+ {error, invalid_status_pseudo_header}
end;
-parse_hd_value(<< C, Rest/bits >>, Acc, Name, SoFar) ->
- parse_hd_value(Rest, Acc, Name, << SoFar/binary, C >>).
-
-%% This function has been copied from cowboy_http.
-clean_value_ws_end(_, -1) ->
- <<>>;
-clean_value_ws_end(Value, N) ->
- case binary:at(Value, N) of
- $\s -> clean_value_ws_end(Value, N - 1);
- $\t -> clean_value_ws_end(Value, N - 1);
- _ ->
- S = N + 1,
- << Value2:S/binary, _/bits >> = Value,
- Value2
+response_pseudo_headers([{<<":", _/bits>>, _}|_], _) ->
+ {error, invalid_pseudo_header};
+response_pseudo_headers(Headers, PseudoHeaders) ->
+ {ok, PseudoHeaders, Headers}.
+
+trailers_have_pseudo_headers([]) ->
+ false;
+trailers_have_pseudo_headers([{<<":", _/bits>>, _}|_]) ->
+ true;
+trailers_have_pseudo_headers([_|Tail]) ->
+ trailers_have_pseudo_headers(Tail).
+
+%% Rejecting invalid regular headers might be a bit too strict for clients.
+regular_headers(Headers, Type, ReqMethod, IsFin, PseudoHeaders) ->
+ case regular_headers(Headers, Type) of
+ ok when Type =:= request ->
+ request_expected_size(Headers, IsFin, PseudoHeaders);
+ ok when Type =:= push_promise ->
+ return_push_promise(Headers, PseudoHeaders);
+ ok when Type =:= response ->
+ response_expected_size(Headers, ReqMethod, IsFin, PseudoHeaders);
+ ok when Type =:= trailers ->
+ return_trailers(Headers);
+ Error = {error, _} ->
+ Error
end.
--ifdef(TEST).
-parse_headers_test_() ->
- Tests = [
- {<<"\r\nRest">>,
- {[], <<"Rest">>}},
- {<<"Server: Erlang/R17 \r\n\r\n">>,
- {[{<<"server">>, <<"Erlang/R17">>}], <<>>}},
- {<<"Server: Erlang/R17\r\n"
- "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
- "Multiline-Header: why hello!\r\n"
- " I didn't see you all the way over there!\r\n"
- "Content-Length: 12\r\n"
- "Content-Type: text/plain\r\n"
- "\r\nRest">>,
- {[{<<"server">>, <<"Erlang/R17">>},
- {<<"date">>, <<"Sun, 23 Feb 2014 09:30:39 GMT">>},
- {<<"multiline-header">>,
- <<"why hello! I didn't see you all the way over there!">>},
- {<<"content-length">>, <<"12">>},
- {<<"content-type">>, <<"text/plain">>}],
- <<"Rest">>}}
- ],
- [{V, fun() -> R = parse_headers(V) end}
- || {V, R} <- Tests].
-
-parse_headers_error_test_() ->
- Tests = [
- <<>>,
- <<"\r">>,
- <<"Malformed\r\n\r\n">>,
- <<"content-type: text/plain\r\nMalformed\r\n\r\n">>,
- <<"HTTP/1.1 200 OK\r\n\r\n">>,
- <<0:80, "\r\n\r\n">>,
- <<"content-type: text/plain\r\ncontent-length: 12\r\n">>
+regular_headers([{<<>>, _}|_], _) ->
+ {error, empty_header_name};
+regular_headers([{<<":", _/bits>>, _}|_], _) ->
+ {error, pseudo_header_after_regular};
+regular_headers([{<<"connection">>, _}|_], _) ->
+ {error, invalid_connection_header};
+regular_headers([{<<"keep-alive">>, _}|_], _) ->
+ {error, invalid_keep_alive_header};
+regular_headers([{<<"proxy-authenticate">>, _}|_], _) ->
+ {error, invalid_proxy_authenticate_header};
+regular_headers([{<<"proxy-authorization">>, _}|_], _) ->
+ {error, invalid_proxy_authorization_header};
+regular_headers([{<<"transfer-encoding">>, _}|_], _) ->
+ {error, invalid_transfer_encoding_header};
+regular_headers([{<<"upgrade">>, _}|_], _) ->
+ {error, invalid_upgrade_header};
+regular_headers([{<<"te">>, Value}|_], request) when Value =/= <<"trailers">> ->
+ {error, invalid_te_value};
+regular_headers([{<<"te">>, _}|_], Type) when Type =/= request ->
+ {error, invalid_te_header};
+regular_headers([{Name, _}|Tail], Type) ->
+ Pattern = [
+ <<$A>>, <<$B>>, <<$C>>, <<$D>>, <<$E>>, <<$F>>, <<$G>>, <<$H>>, <<$I>>,
+ <<$J>>, <<$K>>, <<$L>>, <<$M>>, <<$N>>, <<$O>>, <<$P>>, <<$Q>>, <<$R>>,
+ <<$S>>, <<$T>>, <<$U>>, <<$V>>, <<$W>>, <<$X>>, <<$Y>>, <<$Z>>
],
- [{V, fun() -> {'EXIT', _} = (catch parse_headers(V)) end}
- || V <- Tests].
-
-horse_parse_headers() ->
- horse:repeat(50000,
- parse_headers(<<"Server: Erlang/R17\r\n"
- "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
- "Multiline-Header: why hello!\r\n"
- " I didn't see you all the way over there!\r\n"
- "Content-Length: 12\r\n"
- "Content-Type: text/plain\r\n"
- "\r\nRest">>)
- ).
--endif.
-
-%% @doc Extract path and query string from a binary,
-%% removing any fragment component.
-
--spec parse_fullpath(binary()) -> {binary(), binary()}.
-parse_fullpath(Fullpath) ->
- parse_fullpath(Fullpath, <<>>).
-
-parse_fullpath(<<>>, Path) -> {Path, <<>>};
-parse_fullpath(<< $#, _/bits >>, Path) -> {Path, <<>>};
-parse_fullpath(<< $?, Qs/bits >>, Path) -> parse_fullpath_query(Qs, Path, <<>>);
-parse_fullpath(<< C, Rest/bits >>, SoFar) -> parse_fullpath(Rest, << SoFar/binary, C >>).
-
-parse_fullpath_query(<<>>, Path, Query) -> {Path, Query};
-parse_fullpath_query(<< $#, _/bits >>, Path, Query) -> {Path, Query};
-parse_fullpath_query(<< C, Rest/bits >>, Path, SoFar) ->
- parse_fullpath_query(Rest, Path, << SoFar/binary, C >>).
-
--ifdef(TEST).
-parse_fullpath_test() ->
- {<<"*">>, <<>>} = parse_fullpath(<<"*">>),
- {<<"/">>, <<>>} = parse_fullpath(<<"/">>),
- {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource#fragment">>),
- {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource">>),
- {<<"/">>, <<>>} = parse_fullpath(<<"/?">>),
- {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy#fragment">>),
- {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy">>),
- {<<"/path/to/resource">>, <<"q=cowboy">>}
- = parse_fullpath(<<"/path/to/resource?q=cowboy">>),
+ case binary:match(Name, Pattern) of
+ nomatch -> regular_headers(Tail, Type);
+ _ -> {error, uppercase_header_name}
+ end;
+regular_headers([], _) ->
ok.
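+
+%% For example, regular_headers([{<<"X-Custom">>, <<"1">>}], request)
+%% returns {error, uppercase_header_name}.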
--endif.
-%% @doc Convert an HTTP version to atom.
-
--spec parse_version(binary()) -> version().
-parse_version(<<"HTTP/1.1">>) -> 'HTTP/1.1';
-parse_version(<<"HTTP/1.0">>) -> 'HTTP/1.0'.
+request_expected_size(Headers, IsFin, PseudoHeaders) ->
+ case [CL || {<<"content-length">>, CL} <- Headers] of
+ [] when IsFin =:= fin ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [] ->
+ return_headers(Headers, PseudoHeaders, undefined);
+ [<<"0">>] ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [_] when IsFin =:= fin ->
+ {error, non_zero_length_with_fin_flag};
+ [BinLen] ->
+ parse_expected_size(Headers, PseudoHeaders, BinLen);
+ _ ->
+ {error, multiple_content_length_headers}
+ end.
--ifdef(TEST).
-parse_version_test() ->
- 'HTTP/1.1' = parse_version(<<"HTTP/1.1">>),
- 'HTTP/1.0' = parse_version(<<"HTTP/1.0">>),
- {'EXIT', _} = (catch parse_version(<<"HTTP/1.2">>)),
- ok.
--endif.
+response_expected_size(Headers, ReqMethod, IsFin, PseudoHeaders = #{status := Status}) ->
+ case [CL || {<<"content-length">>, CL} <- Headers] of
+ [] when IsFin =:= fin ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [] ->
+ return_headers(Headers, PseudoHeaders, undefined);
+ [_] when Status >= 100, Status =< 199 ->
+ {error, invalid_content_length_header_1xx};
+ [_] when Status =:= 204 ->
+ {error, invalid_content_length_header_204};
+ [_] when Status >= 200, Status =< 299, ReqMethod =:= <<"CONNECT">> ->
+ {error, connect_invalid_content_length_2xx};
+	%% Responses to HEAD requests and 304 responses may contain
+ %% a content-length header that must be ignored. (RFC7230 3.3.2)
+ [_] when ReqMethod =:= <<"HEAD">> ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [_] when Status =:= 304 ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [<<"0">>] when IsFin =:= fin ->
+ return_headers(Headers, PseudoHeaders, 0);
+ [_] when IsFin =:= fin ->
+ {error, non_zero_length_with_fin_flag};
+ [BinLen] ->
+ parse_expected_size(Headers, PseudoHeaders, BinLen);
+ _ ->
+ {error, multiple_content_length_headers}
+ end.
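+
+-ifdef(TEST).
+%% Hypothetical usage sketch, not part of upstream Cowlib: a content-length
+%% header in a response to a HEAD request is ignored (RFC7230 3.3.2).
+response_expected_size_head_example_test() ->
+	{headers, [{<<"content-length">>, <<"12">>}], #{status := 200}, 0}
+		= response_expected_size([{<<"content-length">>, <<"12">>}],
+			<<"HEAD">>, nofin, #{status => 200}),
+	ok.
+-endif.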
-%% @doc Return formatted request-line and headers.
-%% @todo Add tests when the corresponding reverse functions are added.
+parse_expected_size(Headers, PseudoHeaders, BinLen) ->
+ try cow_http_hd:parse_content_length(BinLen) of
+ Len ->
+ return_headers(Headers, PseudoHeaders, Len)
+ catch _:_ ->
+ {error, invalid_content_length_header}
+ end.
--spec request(binary(), iodata(), version(), headers()) -> iodata().
-request(Method, Path, Version, Headers) ->
- [Method, <<" ">>, Path, <<" ">>, version(Version), <<"\r\n">>,
- [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers],
- <<"\r\n">>].
+return_headers(Headers, PseudoHeaders, Len) ->
+ {headers, Headers, PseudoHeaders, Len}.
--spec response(status() | binary(), version(), headers()) -> iodata().
-response(Status, Version, Headers) ->
- [version(Version), <<" ">>, status(Status), <<"\r\n">>,
- headers(Headers), <<"\r\n">>].
+return_push_promise(Headers, PseudoHeaders) ->
+ {push_promise, Headers, PseudoHeaders}.
--spec headers(headers()) -> iodata().
-headers(Headers) ->
- [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers].
+return_trailers(Headers) ->
+ {trailers, Headers}.
-%% @doc Return the version as a binary.
+%% Remove HTTP/1-specific headers.
--spec version(version()) -> binary().
-version('HTTP/1.1') -> <<"HTTP/1.1">>;
-version('HTTP/1.0') -> <<"HTTP/1.0">>.
+-spec remove_http1_headers(headers()) -> headers().
--ifdef(TEST).
-version_test() ->
- <<"HTTP/1.1">> = version('HTTP/1.1'),
- <<"HTTP/1.0">> = version('HTTP/1.0'),
- {'EXIT', _} = (catch version('HTTP/1.2')),
- ok.
--endif.
-
-%% @doc Return the status code and string as binary.
-
--spec status(status() | binary()) -> binary().
-status(100) -> <<"100 Continue">>;
-status(101) -> <<"101 Switching Protocols">>;
-status(102) -> <<"102 Processing">>;
-status(103) -> <<"103 Early Hints">>;
-status(200) -> <<"200 OK">>;
-status(201) -> <<"201 Created">>;
-status(202) -> <<"202 Accepted">>;
-status(203) -> <<"203 Non-Authoritative Information">>;
-status(204) -> <<"204 No Content">>;
-status(205) -> <<"205 Reset Content">>;
-status(206) -> <<"206 Partial Content">>;
-status(207) -> <<"207 Multi-Status">>;
-status(208) -> <<"208 Already Reported">>;
-status(226) -> <<"226 IM Used">>;
-status(300) -> <<"300 Multiple Choices">>;
-status(301) -> <<"301 Moved Permanently">>;
-status(302) -> <<"302 Found">>;
-status(303) -> <<"303 See Other">>;
-status(304) -> <<"304 Not Modified">>;
-status(305) -> <<"305 Use Proxy">>;
-status(306) -> <<"306 Switch Proxy">>;
-status(307) -> <<"307 Temporary Redirect">>;
-status(308) -> <<"308 Permanent Redirect">>;
-status(400) -> <<"400 Bad Request">>;
-status(401) -> <<"401 Unauthorized">>;
-status(402) -> <<"402 Payment Required">>;
-status(403) -> <<"403 Forbidden">>;
-status(404) -> <<"404 Not Found">>;
-status(405) -> <<"405 Method Not Allowed">>;
-status(406) -> <<"406 Not Acceptable">>;
-status(407) -> <<"407 Proxy Authentication Required">>;
-status(408) -> <<"408 Request Timeout">>;
-status(409) -> <<"409 Conflict">>;
-status(410) -> <<"410 Gone">>;
-status(411) -> <<"411 Length Required">>;
-status(412) -> <<"412 Precondition Failed">>;
-status(413) -> <<"413 Request Entity Too Large">>;
-status(414) -> <<"414 Request-URI Too Long">>;
-status(415) -> <<"415 Unsupported Media Type">>;
-status(416) -> <<"416 Requested Range Not Satisfiable">>;
-status(417) -> <<"417 Expectation Failed">>;
-status(418) -> <<"418 I'm a teapot">>;
-status(421) -> <<"421 Misdirected Request">>;
-status(422) -> <<"422 Unprocessable Entity">>;
-status(423) -> <<"423 Locked">>;
-status(424) -> <<"424 Failed Dependency">>;
-status(425) -> <<"425 Unordered Collection">>;
-status(426) -> <<"426 Upgrade Required">>;
-status(428) -> <<"428 Precondition Required">>;
-status(429) -> <<"429 Too Many Requests">>;
-status(431) -> <<"431 Request Header Fields Too Large">>;
-status(451) -> <<"451 Unavailable For Legal Reasons">>;
-status(500) -> <<"500 Internal Server Error">>;
-status(501) -> <<"501 Not Implemented">>;
-status(502) -> <<"502 Bad Gateway">>;
-status(503) -> <<"503 Service Unavailable">>;
-status(504) -> <<"504 Gateway Timeout">>;
-status(505) -> <<"505 HTTP Version Not Supported">>;
-status(506) -> <<"506 Variant Also Negotiates">>;
-status(507) -> <<"507 Insufficient Storage">>;
-status(508) -> <<"508 Loop Detected">>;
-status(510) -> <<"510 Not Extended">>;
-status(511) -> <<"511 Network Authentication Required">>;
-status(B) when is_binary(B) -> B.
+remove_http1_headers(Headers) ->
+ RemoveHeaders0 = [
+ <<"keep-alive">>,
+ <<"proxy-connection">>,
+ <<"transfer-encoding">>,
+ <<"upgrade">>
+ ],
+ RemoveHeaders = case lists:keyfind(<<"connection">>, 1, Headers) of
+ false ->
+ RemoveHeaders0;
+ {_, ConnHd} ->
+ %% We do not need to worry about any "close" header because
+ %% that header name is reserved.
+ Connection = cow_http_hd:parse_connection(ConnHd),
+ Connection ++ [<<"connection">>|RemoveHeaders0]
+ end,
+ lists:filter(fun({Name, _}) ->
+ not lists:member(Name, RemoveHeaders)
+ end, Headers).
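+
+-ifdef(TEST).
+%% Hypothetical usage sketch, not part of upstream Cowlib: header names
+%% listed in the connection header are removed along with connection itself.
+remove_http1_headers_example_test() ->
+	[{<<"host">>, <<"example.org">>}] = remove_http1_headers([
+		{<<"connection">>, <<"upgrade">>},
+		{<<"upgrade">>, <<"websocket">>},
+		{<<"host">>, <<"example.org">>}
+	]),
+	ok.
+-endif.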
diff --git a/src/cow_http1.erl b/src/cow_http1.erl
new file mode 100644
index 0000000..24a8c88
--- /dev/null
+++ b/src/cow_http1.erl
@@ -0,0 +1,421 @@
+%% Copyright (c) 2013-2024, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http1).
+
+-export([parse_request_line/1]).
+-export([parse_status_line/1]).
+-export([status_to_integer/1]).
+-export([parse_headers/1]).
+
+-export([parse_fullpath/1]).
+-export([parse_version/1]).
+
+-export([request/4]).
+-export([response/3]).
+-export([headers/1]).
+-export([version/1]).
+
+-type version() :: 'HTTP/1.0' | 'HTTP/1.1'.
+-export_type([version/0]).
+
+-include("cow_inline.hrl").
+
+%% @doc Parse the request line.
+
+-spec parse_request_line(binary()) -> {binary(), binary(), version(), binary()}.
+parse_request_line(Data) ->
+ {Pos, _} = binary:match(Data, <<"\r">>),
+ <<RequestLine:Pos/binary, "\r\n", Rest/bits>> = Data,
+ [Method, Target, Version0] = binary:split(RequestLine, <<$\s>>, [trim_all, global]),
+ Version = case Version0 of
+ <<"HTTP/1.1">> -> 'HTTP/1.1';
+ <<"HTTP/1.0">> -> 'HTTP/1.0'
+ end,
+ {Method, Target, Version, Rest}.
+
+-ifdef(TEST).
+parse_request_line_test_() ->
+ Tests = [
+ {<<"GET /path HTTP/1.0\r\nRest">>,
+ {<<"GET">>, <<"/path">>, 'HTTP/1.0', <<"Rest">>}},
+ {<<"GET /path HTTP/1.1\r\nRest">>,
+ {<<"GET">>, <<"/path">>, 'HTTP/1.1', <<"Rest">>}},
+ {<<"CONNECT proxy.example.org:1080 HTTP/1.1\r\nRest">>,
+ {<<"CONNECT">>, <<"proxy.example.org:1080">>, 'HTTP/1.1', <<"Rest">>}}
+ ],
+ [{V, fun() -> R = parse_request_line(V) end}
+ || {V, R} <- Tests].
+
+parse_request_line_error_test_() ->
+ Tests = [
+ <<>>,
+ <<"GET">>,
+ <<"GET /path\r\n">>,
+ <<"GET /path HTTP/1.1">>,
+ <<"GET /path HTTP/1.1\r">>,
+ <<"GET /path HTTP/1.1\n">>,
+ <<"GET /path HTTP/0.9\r\n">>,
+ <<"content-type: text/plain\r\n">>,
+ <<0:80, "\r\n">>
+ ],
+ [{V, fun() -> {'EXIT', _} = (catch parse_request_line(V)) end}
+ || V <- Tests].
+
+horse_parse_request_line_get_path() ->
+ horse:repeat(200000,
+ parse_request_line(<<"GET /path HTTP/1.1\r\n">>)
+ ).
+-endif.
+
+%% @doc Parse the status line.
+
+-spec parse_status_line(binary()) -> {version(), cow_http:status(), binary(), binary()}.
+parse_status_line(<< "HTTP/1.1 200 OK\r\n", Rest/bits >>) ->
+ {'HTTP/1.1', 200, <<"OK">>, Rest};
+parse_status_line(<< "HTTP/1.1 404 Not Found\r\n", Rest/bits >>) ->
+ {'HTTP/1.1', 404, <<"Not Found">>, Rest};
+parse_status_line(<< "HTTP/1.1 500 Internal Server Error\r\n", Rest/bits >>) ->
+ {'HTTP/1.1', 500, <<"Internal Server Error">>, Rest};
+parse_status_line(<< "HTTP/1.1 ", Status/bits >>) ->
+ parse_status_line(Status, 'HTTP/1.1');
+parse_status_line(<< "HTTP/1.0 ", Status/bits >>) ->
+ parse_status_line(Status, 'HTTP/1.0').
+
+parse_status_line(<<H, T, U, " ", Rest/bits>>, Version) ->
+ Status = status_to_integer(H, T, U),
+ {Pos, _} = binary:match(Rest, <<"\r">>),
+ << StatusStr:Pos/binary, "\r\n", Rest2/bits >> = Rest,
+ {Version, Status, StatusStr, Rest2}.
+
+-spec status_to_integer(cow_http:status() | binary()) -> cow_http:status().
+status_to_integer(Status) when is_integer(Status) ->
+ Status;
+status_to_integer(Status) ->
+ case Status of
+ <<H, T, U>> ->
+ status_to_integer(H, T, U);
+ <<H, T, U, " ", _/bits>> ->
+ status_to_integer(H, T, U)
+ end.
+
+status_to_integer(H, T, U)
+ when $0 =< H, H =< $9, $0 =< T, T =< $9, $0 =< U, U =< $9 ->
+ (H - $0) * 100 + (T - $0) * 10 + (U - $0).
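+
+%% For example, status_to_integer($2, $0, $4) computes
+%% 2 * 100 + 0 * 10 + 4 = 204.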
+
+-ifdef(TEST).
+parse_status_line_test_() ->
+ Tests = [
+ {<<"HTTP/1.1 200 OK\r\nRest">>,
+ {'HTTP/1.1', 200, <<"OK">>, <<"Rest">>}},
+ {<<"HTTP/1.0 404 Not Found\r\nRest">>,
+ {'HTTP/1.0', 404, <<"Not Found">>, <<"Rest">>}},
+ {<<"HTTP/1.1 500 Something very funny here\r\nRest">>,
+ {'HTTP/1.1', 500, <<"Something very funny here">>, <<"Rest">>}},
+ {<<"HTTP/1.1 200 \r\nRest">>,
+ {'HTTP/1.1', 200, <<>>, <<"Rest">>}}
+ ],
+ [{V, fun() -> R = parse_status_line(V) end}
+ || {V, R} <- Tests].
+
+parse_status_line_error_test_() ->
+ Tests = [
+ <<>>,
+ <<"HTTP/1.1">>,
+ <<"HTTP/1.1 200\r\n">>,
+ <<"HTTP/1.1 200 OK">>,
+ <<"HTTP/1.1 200 OK\r">>,
+ <<"HTTP/1.1 200 OK\n">>,
+ <<"HTTP/0.9 200 OK\r\n">>,
+ <<"HTTP/1.1 42 Answer\r\n">>,
+ <<"HTTP/1.1 999999999 More than OK\r\n">>,
+ <<"content-type: text/plain\r\n">>,
+ <<0:80, "\r\n">>
+ ],
+ [{V, fun() -> {'EXIT', _} = (catch parse_status_line(V)) end}
+ || V <- Tests].
+
+horse_parse_status_line_200() ->
+ horse:repeat(200000,
+ parse_status_line(<<"HTTP/1.1 200 OK\r\n">>)
+ ).
+
+horse_parse_status_line_404() ->
+ horse:repeat(200000,
+ parse_status_line(<<"HTTP/1.1 404 Not Found\r\n">>)
+ ).
+
+horse_parse_status_line_500() ->
+ horse:repeat(200000,
+ parse_status_line(<<"HTTP/1.1 500 Internal Server Error\r\n">>)
+ ).
+
+horse_parse_status_line_other() ->
+ horse:repeat(200000,
+ parse_status_line(<<"HTTP/1.1 416 Requested range not satisfiable\r\n">>)
+ ).
+-endif.
+
+%% @doc Parse the list of headers.
+
+-spec parse_headers(binary()) -> {[{binary(), binary()}], binary()}.
+parse_headers(Data) ->
+ parse_header(Data, []).
+
+parse_header(<< $\r, $\n, Rest/bits >>, Acc) ->
+ {lists:reverse(Acc), Rest};
+parse_header(Data, Acc) ->
+ parse_hd_name(Data, Acc, <<>>).
+
+parse_hd_name(<< C, Rest/bits >>, Acc, SoFar) ->
+ case C of
+ $: -> parse_hd_before_value(Rest, Acc, SoFar);
+ $\s -> parse_hd_name_ws(Rest, Acc, SoFar);
+ $\t -> parse_hd_name_ws(Rest, Acc, SoFar);
+ _ -> ?LOWER(parse_hd_name, Rest, Acc, SoFar)
+ end.
+
+parse_hd_name_ws(<< C, Rest/bits >>, Acc, Name) ->
+ case C of
+ $: -> parse_hd_before_value(Rest, Acc, Name);
+ $\s -> parse_hd_name_ws(Rest, Acc, Name);
+ $\t -> parse_hd_name_ws(Rest, Acc, Name)
+ end.
+
+parse_hd_before_value(<< $\s, Rest/bits >>, Acc, Name) ->
+ parse_hd_before_value(Rest, Acc, Name);
+parse_hd_before_value(<< $\t, Rest/bits >>, Acc, Name) ->
+ parse_hd_before_value(Rest, Acc, Name);
+parse_hd_before_value(Data, Acc, Name) ->
+ parse_hd_value(Data, Acc, Name, <<>>).
+
+parse_hd_value(<< $\r, Rest/bits >>, Acc, Name, SoFar) ->
+ case Rest of
+ << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t ->
+ parse_hd_value(Rest2, Acc, Name, << SoFar/binary, C >>);
+ << $\n, Rest2/bits >> ->
+ Value = clean_value_ws_end(SoFar, byte_size(SoFar) - 1),
+ parse_header(Rest2, [{Name, Value}|Acc])
+ end;
+parse_hd_value(<< C, Rest/bits >>, Acc, Name, SoFar) ->
+ parse_hd_value(Rest, Acc, Name, << SoFar/binary, C >>).
+
+%% This function has been copied from cowboy_http.
+clean_value_ws_end(_, -1) ->
+ <<>>;
+clean_value_ws_end(Value, N) ->
+ case binary:at(Value, N) of
+ $\s -> clean_value_ws_end(Value, N - 1);
+ $\t -> clean_value_ws_end(Value, N - 1);
+ _ ->
+ S = N + 1,
+ << Value2:S/binary, _/bits >> = Value,
+ Value2
+ end.
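+
+%% For example, clean_value_ws_end(<<"Erlang/R17 ">>, 10) drops the
+%% trailing space and returns <<"Erlang/R17">>.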
+
+-ifdef(TEST).
+parse_headers_test_() ->
+ Tests = [
+ {<<"\r\nRest">>,
+ {[], <<"Rest">>}},
+ {<<"Server: Erlang/R17 \r\n\r\n">>,
+ {[{<<"server">>, <<"Erlang/R17">>}], <<>>}},
+ {<<"Server: Erlang/R17\r\n"
+ "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
+ "Multiline-Header: why hello!\r\n"
+ " I didn't see you all the way over there!\r\n"
+ "Content-Length: 12\r\n"
+ "Content-Type: text/plain\r\n"
+ "\r\nRest">>,
+ {[{<<"server">>, <<"Erlang/R17">>},
+ {<<"date">>, <<"Sun, 23 Feb 2014 09:30:39 GMT">>},
+ {<<"multiline-header">>,
+ <<"why hello! I didn't see you all the way over there!">>},
+ {<<"content-length">>, <<"12">>},
+ {<<"content-type">>, <<"text/plain">>}],
+ <<"Rest">>}}
+ ],
+ [{V, fun() -> R = parse_headers(V) end}
+ || {V, R} <- Tests].
+
+parse_headers_error_test_() ->
+ Tests = [
+ <<>>,
+ <<"\r">>,
+ <<"Malformed\r\n\r\n">>,
+ <<"content-type: text/plain\r\nMalformed\r\n\r\n">>,
+ <<"HTTP/1.1 200 OK\r\n\r\n">>,
+ <<0:80, "\r\n\r\n">>,
+ <<"content-type: text/plain\r\ncontent-length: 12\r\n">>
+ ],
+ [{V, fun() -> {'EXIT', _} = (catch parse_headers(V)) end}
+ || V <- Tests].
+
+horse_parse_headers() ->
+ horse:repeat(50000,
+ parse_headers(<<"Server: Erlang/R17\r\n"
+ "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
+ "Multiline-Header: why hello!\r\n"
+ " I didn't see you all the way over there!\r\n"
+ "Content-Length: 12\r\n"
+ "Content-Type: text/plain\r\n"
+ "\r\nRest">>)
+ ).
+-endif.
+
+%% @doc Extract path and query string from a binary,
+%% removing any fragment component.
+
+-spec parse_fullpath(binary()) -> {binary(), binary()}.
+parse_fullpath(Fullpath) ->
+ parse_fullpath(Fullpath, <<>>).
+
+parse_fullpath(<<>>, Path) -> {Path, <<>>};
+parse_fullpath(<< $#, _/bits >>, Path) -> {Path, <<>>};
+parse_fullpath(<< $?, Qs/bits >>, Path) -> parse_fullpath_query(Qs, Path, <<>>);
+parse_fullpath(<< C, Rest/bits >>, SoFar) -> parse_fullpath(Rest, << SoFar/binary, C >>).
+
+parse_fullpath_query(<<>>, Path, Query) -> {Path, Query};
+parse_fullpath_query(<< $#, _/bits >>, Path, Query) -> {Path, Query};
+parse_fullpath_query(<< C, Rest/bits >>, Path, SoFar) ->
+ parse_fullpath_query(Rest, Path, << SoFar/binary, C >>).
+
+-ifdef(TEST).
+parse_fullpath_test() ->
+ {<<"*">>, <<>>} = parse_fullpath(<<"*">>),
+ {<<"/">>, <<>>} = parse_fullpath(<<"/">>),
+ {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource#fragment">>),
+ {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource">>),
+ {<<"/">>, <<>>} = parse_fullpath(<<"/?">>),
+ {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy#fragment">>),
+ {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy">>),
+ {<<"/path/to/resource">>, <<"q=cowboy">>}
+ = parse_fullpath(<<"/path/to/resource?q=cowboy">>),
+ ok.
+-endif.
+
+%% @doc Convert an HTTP version to atom.
+
+-spec parse_version(binary()) -> version().
+parse_version(<<"HTTP/1.1">>) -> 'HTTP/1.1';
+parse_version(<<"HTTP/1.0">>) -> 'HTTP/1.0'.
+
+-ifdef(TEST).
+parse_version_test() ->
+ 'HTTP/1.1' = parse_version(<<"HTTP/1.1">>),
+ 'HTTP/1.0' = parse_version(<<"HTTP/1.0">>),
+ {'EXIT', _} = (catch parse_version(<<"HTTP/1.2">>)),
+ ok.
+-endif.
+
+%% @doc Return formatted request-line and headers.
+%% @todo Add tests when the corresponding reverse functions are added.
+
+-spec request(binary(), iodata(), version(), cow_http:headers()) -> iodata().
+request(Method, Path, Version, Headers) ->
+ [Method, <<" ">>, Path, <<" ">>, version(Version), <<"\r\n">>,
+ [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers],
+ <<"\r\n">>].
+
+-spec response(cow_http:status() | binary(), version(), cow_http:headers())
+ -> iodata().
+response(Status, Version, Headers) ->
+ [version(Version), <<" ">>, status(Status), <<"\r\n">>,
+ headers(Headers), <<"\r\n">>].
+
+-spec headers(cow_http:headers()) -> iodata().
+headers(Headers) ->
+ [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers].
+
+%% @doc Return the version as a binary.
+
+-spec version(version()) -> binary().
+version('HTTP/1.1') -> <<"HTTP/1.1">>;
+version('HTTP/1.0') -> <<"HTTP/1.0">>.
+
+-ifdef(TEST).
+version_test() ->
+ <<"HTTP/1.1">> = version('HTTP/1.1'),
+ <<"HTTP/1.0">> = version('HTTP/1.0'),
+ {'EXIT', _} = (catch version('HTTP/1.2')),
+ ok.
+-endif.
+
+%% @doc Return the status code and string as binary.
+
+-spec status(cow_http:status() | binary()) -> binary().
+status(100) -> <<"100 Continue">>;
+status(101) -> <<"101 Switching Protocols">>;
+status(102) -> <<"102 Processing">>;
+status(103) -> <<"103 Early Hints">>;
+status(200) -> <<"200 OK">>;
+status(201) -> <<"201 Created">>;
+status(202) -> <<"202 Accepted">>;
+status(203) -> <<"203 Non-Authoritative Information">>;
+status(204) -> <<"204 No Content">>;
+status(205) -> <<"205 Reset Content">>;
+status(206) -> <<"206 Partial Content">>;
+status(207) -> <<"207 Multi-Status">>;
+status(208) -> <<"208 Already Reported">>;
+status(226) -> <<"226 IM Used">>;
+status(300) -> <<"300 Multiple Choices">>;
+status(301) -> <<"301 Moved Permanently">>;
+status(302) -> <<"302 Found">>;
+status(303) -> <<"303 See Other">>;
+status(304) -> <<"304 Not Modified">>;
+status(305) -> <<"305 Use Proxy">>;
+status(306) -> <<"306 Switch Proxy">>;
+status(307) -> <<"307 Temporary Redirect">>;
+status(308) -> <<"308 Permanent Redirect">>;
+status(400) -> <<"400 Bad Request">>;
+status(401) -> <<"401 Unauthorized">>;
+status(402) -> <<"402 Payment Required">>;
+status(403) -> <<"403 Forbidden">>;
+status(404) -> <<"404 Not Found">>;
+status(405) -> <<"405 Method Not Allowed">>;
+status(406) -> <<"406 Not Acceptable">>;
+status(407) -> <<"407 Proxy Authentication Required">>;
+status(408) -> <<"408 Request Timeout">>;
+status(409) -> <<"409 Conflict">>;
+status(410) -> <<"410 Gone">>;
+status(411) -> <<"411 Length Required">>;
+status(412) -> <<"412 Precondition Failed">>;
+status(413) -> <<"413 Request Entity Too Large">>;
+status(414) -> <<"414 Request-URI Too Long">>;
+status(415) -> <<"415 Unsupported Media Type">>;
+status(416) -> <<"416 Requested Range Not Satisfiable">>;
+status(417) -> <<"417 Expectation Failed">>;
+status(418) -> <<"418 I'm a teapot">>;
+status(421) -> <<"421 Misdirected Request">>;
+status(422) -> <<"422 Unprocessable Entity">>;
+status(423) -> <<"423 Locked">>;
+status(424) -> <<"424 Failed Dependency">>;
+status(425) -> <<"425 Unordered Collection">>;
+status(426) -> <<"426 Upgrade Required">>;
+status(428) -> <<"428 Precondition Required">>;
+status(429) -> <<"429 Too Many Requests">>;
+status(431) -> <<"431 Request Header Fields Too Large">>;
+status(451) -> <<"451 Unavailable For Legal Reasons">>;
+status(500) -> <<"500 Internal Server Error">>;
+status(501) -> <<"501 Not Implemented">>;
+status(502) -> <<"502 Bad Gateway">>;
+status(503) -> <<"503 Service Unavailable">>;
+status(504) -> <<"504 Gateway Timeout">>;
+status(505) -> <<"505 HTTP Version Not Supported">>;
+status(506) -> <<"506 Variant Also Negotiates">>;
+status(507) -> <<"507 Insufficient Storage">>;
+status(508) -> <<"508 Loop Detected">>;
+status(510) -> <<"510 Not Extended">>;
+status(511) -> <<"511 Network Authentication Required">>;
+status(B) when is_binary(B) -> B.
diff --git a/src/cow_http2.erl b/src/cow_http2.erl
index 225d2ec..68f3625 100644
--- a/src/cow_http2.erl
+++ b/src/cow_http2.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2015-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2015-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
@@ -39,9 +39,6 @@
-type streamid() :: pos_integer().
-export_type([streamid/0]).
--type fin() :: fin | nofin.
--export_type([fin/0]).
-
-type head_fin() :: head_fin | head_nofin.
-export_type([head_fin/0]).
@@ -66,9 +63,10 @@
| unknown_error.
-export_type([error/0]).
--type frame() :: {data, streamid(), fin(), binary()}
- | {headers, streamid(), fin(), head_fin(), binary()}
- | {headers, streamid(), fin(), head_fin(), exclusive(), streamid(), weight(), binary()}
+-type frame() :: {data, streamid(), cow_http:fin(), binary()}
+ | {headers, streamid(), cow_http:fin(), head_fin(), binary()}
+ | {headers, streamid(), cow_http:fin(), head_fin(),
+ exclusive(), streamid(), weight(), binary()}
| {priority, streamid(), exclusive(), streamid(), weight()}
| {rst_stream, streamid(), error()}
| {settings, settings()}
@@ -192,8 +190,8 @@ parse(<< 5:24, 2:8, _:9, StreamID:31, _:1, StreamID:31, _:8, Rest/bits >>) ->
'PRIORITY frames cannot make a stream depend on itself. (RFC7540 5.3.1)', Rest};
parse(<< 5:24, 2:8, _:9, StreamID:31, E:1, DepStreamID:31, Weight:8, Rest/bits >>) ->
{ok, {priority, StreamID, parse_exclusive(E), DepStreamID, Weight + 1}, Rest};
-%% @todo figure out how to best deal with frame size errors; if we have everything fine
-%% if not we might want to inform the caller how much he should expect so that it can
+%% @todo Figure out how to best deal with non-fatal frame size errors; if we have everything
+%% then we are fine; if not, we might want to inform the caller how much to expect so that it can
%% decide if it should just close the connection
parse(<< BadLen:24, 2:8, _:9, StreamID:31, _:BadLen/binary, Rest/bits >>) ->
{stream_error, StreamID, frame_size_error, 'PRIORITY frames MUST be 5 bytes wide. (RFC7540 6.3)', Rest};
@@ -204,8 +202,7 @@ parse(<< 4:24, 3:8, _:9, 0:31, _/bits >>) ->
{connection_error, protocol_error, 'RST_STREAM frames MUST be associated with a stream. (RFC7540 6.4)'};
parse(<< 4:24, 3:8, _:9, StreamID:31, ErrorCode:32, Rest/bits >>) ->
{ok, {rst_stream, StreamID, parse_error_code(ErrorCode)}, Rest};
-%% @todo same as priority
-parse(<< _:24, 3:8, _:9, _:31, _/bits >>) ->
+parse(<< BadLen:24, 3:8, _:9, _:31, _/bits >>) when BadLen =/= 4 ->
{connection_error, frame_size_error, 'RST_STREAM frames MUST be 4 bytes wide. (RFC7540 6.4)'};
%%
%% SETTINGS frames.
diff --git a/src/cow_http2_machine.erl b/src/cow_http2_machine.erl
index 35eb72e..808c6cf 100644
--- a/src/cow_http2_machine.erl
+++ b/src/cow_http2_machine.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2018-2024, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
@@ -49,6 +49,7 @@
max_concurrent_streams => non_neg_integer() | infinity,
max_decode_table_size => non_neg_integer(),
max_encode_table_size => non_neg_integer(),
+ max_fragmented_header_block_size => 16384..16#7fffffff,
max_frame_size_received => 16384..16777215,
max_frame_size_sent => 16384..16777215 | infinity,
max_stream_window_size => 0..16#7fffffff,
@@ -75,19 +76,19 @@
method = undefined :: binary(),
%% Whether we finished sending data.
- local = idle :: idle | cow_http2:fin(),
+ local = idle :: idle | cow_http:fin(),
%% Local flow control window (how much we can send).
local_window :: integer(),
%% Buffered data waiting for the flow control window to increase.
local_buffer = queue:new() ::
- queue:queue({cow_http2:fin(), non_neg_integer(), {data, iodata()} | #sendfile{}}),
+ queue:queue({cow_http:fin(), non_neg_integer(), {data, iodata()} | #sendfile{}}),
local_buffer_size = 0 :: non_neg_integer(),
local_trailers = undefined :: undefined | cow_http:headers(),
%% Whether we finished receiving data.
- remote = idle :: idle | cow_http2:fin(),
+ remote = idle :: idle | cow_http:fin(),
%% Remote flow control window (how much we accept to receive).
remote_window :: integer(),
@@ -104,7 +105,7 @@
-type stream() :: #stream{}.
-type continued_frame() ::
- {headers, cow_http2:streamid(), cow_http2:fin(), cow_http2:head_fin(), binary()} |
+ {headers, cow_http2:streamid(), cow_http:fin(), cow_http2:head_fin(), binary()} |
{push_promise, cow_http2:streamid(), cow_http2:head_fin(), cow_http2:streamid(), binary()}.
-record(http2_machine, {
@@ -133,8 +134,9 @@
initial_window_size => 65535
% max_frame_size => 16384
% max_header_list_size => infinity
+% enable_connect_protocol => false
} :: map(),
- next_settings = undefined :: undefined | map(),
+ next_settings = #{} :: map(),
remote_settings = #{
initial_window_size => 65535
} :: map(),
@@ -170,20 +172,6 @@
-opaque http2_machine() :: #http2_machine{}.
-export_type([http2_machine/0]).
--type pseudo_headers() :: #{} %% Trailers
- | #{ %% Responses.
- status := cow_http:status()
- } | #{ %% Normal CONNECT requests.
- method := binary(),
- authority := binary()
- } | #{ %% Other requests and extended CONNECT requests.
- method := binary(),
- scheme := binary(),
- authority := binary(),
- path := binary(),
- protocol => binary()
- }.
-
%% Returns true when the given StreamID is for a local-initiated stream.
-define(IS_SERVER_LOCAL(StreamID), ((StreamID rem 2) =:= 0)).
-define(IS_CLIENT_LOCAL(StreamID), ((StreamID rem 2) =:= 1)).
@@ -291,15 +279,16 @@ init_upgrade_stream(Method, State=#http2_machine{mode=server, remote_streamid=0,
-spec frame(cow_http2:frame(), State)
-> {ok, State}
- | {ok, {data, cow_http2:streamid(), cow_http2:fin(), binary()}, State}
- | {ok, {headers, cow_http2:streamid(), cow_http2:fin(),
- cow_http:headers(), pseudo_headers(), non_neg_integer() | undefined}, State}
+ | {ok, {data, cow_http2:streamid(), cow_http:fin(), binary()}, State}
+ | {ok, {headers, cow_http2:streamid(), cow_http:fin(),
+ cow_http:headers(), cow_http:pseudo_headers(),
+ non_neg_integer() | undefined}, State}
| {ok, {trailers, cow_http2:streamid(), cow_http:headers()}, State}
| {ok, {rst_stream, cow_http2:streamid(), cow_http2:error()}, State}
| {ok, {push_promise, cow_http2:streamid(), cow_http2:streamid(),
- cow_http:headers(), pseudo_headers()}, State}
+ cow_http:headers(), cow_http:pseudo_headers()}, State}
| {ok, {goaway, cow_http2:streamid(), cow_http2:error(), binary()}, State}
- | {send, [{cow_http2:streamid(), cow_http2:fin(),
+ | {send, [{cow_http2:streamid(), cow_http:fin(),
[{data, iodata()} | #sendfile{} | {trailers, cow_http:headers()}]}], State}
| {error, {stream_error, cow_http2:streamid(), cow_http2:error(), atom()}, State}
| {error, {connection_error, cow_http2:error(), atom()}, State}
@@ -433,7 +422,7 @@ is_body_size_valid(_) ->
%% The order of the fields matter.
-record(headers, {
id :: cow_http2:streamid(),
- fin :: cow_http2:fin(),
+ fin :: cow_http:fin(),
head :: cow_http2:head_fin(),
data :: binary()
}).
@@ -443,8 +432,8 @@ headers_frame(Frame=#headers{}, State=#http2_machine{mode=Mode}) ->
server -> server_headers_frame(Frame, State);
client -> client_headers_frame(Frame, State)
end;
-%% @todo Handle the PRIORITY data, but only if this returns an ok tuple.
-%% @todo Do not lose the PRIORITY information if CONTINUATION frames follow.
+%% The PRIORITY mechanism is widely considered flawed and has been
+%% deprecated; we will not implement it.
headers_frame({headers, StreamID, IsFin, IsHeadFin,
_IsExclusive, _DepStreamID, _Weight, HeaderData},
State=#http2_machine{mode=Mode}) ->
@@ -535,7 +524,7 @@ headers_decode(Frame=#headers{head=head_fin, data=HeaderData},
headers_enforce_concurrency_limit(Frame,
State#http2_machine{decode_state=DecodeState}, Type, Stream, Headers);
{Headers, DecodeState} ->
- headers_pseudo_headers(Frame,
+ headers_process(Frame,
State#http2_machine{decode_state=DecodeState}, Type, Stream, Headers)
catch _:_ ->
{error, {connection_error, compression_error,
@@ -551,239 +540,95 @@ headers_enforce_concurrency_limit(Frame=#headers{id=StreamID},
%% in the Streams variable yet and so we'll end up with +1 stream.
case map_size(Streams) < MaxConcurrentStreams of
true ->
- headers_pseudo_headers(Frame, State, Type, Stream, Headers);
+ headers_process(Frame, State, Type, Stream, Headers);
false ->
{error, {stream_error, StreamID, refused_stream,
'Maximum number of concurrent streams has been reached. (RFC7540 5.1.2)'},
State}
end.
-headers_pseudo_headers(Frame, State=#http2_machine{local_settings=LocalSettings},
- Type, Stream, Headers0) when Type =:= request; Type =:= push_promise ->
- IsExtendedConnectEnabled = maps:get(enable_connect_protocol, LocalSettings, false),
- case request_pseudo_headers(Headers0, #{}) of
- %% Extended CONNECT method (RFC8441).
- {ok, PseudoHeaders=#{method := <<"CONNECT">>, scheme := _,
- authority := _, path := _, protocol := _}, Headers}
- when IsExtendedConnectEnabled ->
- headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers);
- {ok, #{method := <<"CONNECT">>, scheme := _,
- authority := _, path := _}, _}
- when IsExtendedConnectEnabled ->
- headers_malformed(Frame, State,
- 'The :protocol pseudo-header MUST be sent with an extended CONNECT. (RFC8441 4)');
- {ok, #{protocol := _}, _} ->
- headers_malformed(Frame, State,
- 'The :protocol pseudo-header is only defined for the extended CONNECT. (RFC8441 4)');
- %% Normal CONNECT (no scheme/path).
- {ok, PseudoHeaders=#{method := <<"CONNECT">>, authority := _}, Headers}
- when map_size(PseudoHeaders) =:= 2 ->
- headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers);
- {ok, #{method := <<"CONNECT">>}, _} ->
- headers_malformed(Frame, State,
- 'CONNECT requests only use the :method and :authority pseudo-headers. (RFC7540 8.3)');
- %% Other requests.
- {ok, PseudoHeaders=#{method := _, scheme := _, path := _}, Headers} ->
- headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers);
- {ok, _, _} ->
- headers_malformed(Frame, State,
- 'A required pseudo-header was not found. (RFC7540 8.1.2.3)');
- {error, HumanReadable} ->
- headers_malformed(Frame, State, HumanReadable)
- end;
-headers_pseudo_headers(Frame=#headers{id=StreamID},
- State, Type=response, Stream, Headers0) ->
- case response_pseudo_headers(Headers0, #{}) of
- {ok, PseudoHeaders=#{status := _}, Headers} ->
- headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers);
- {ok, _, _} ->
- stream_reset(StreamID, State, protocol_error,
- 'A required pseudo-header was not found. (RFC7540 8.1.2.4)');
- {error, HumanReadable} ->
- stream_reset(StreamID, State, protocol_error, HumanReadable)
- end;
-headers_pseudo_headers(Frame=#headers{id=StreamID},
- State, Type=trailers, Stream, Headers) ->
- case trailers_contain_pseudo_headers(Headers) of
- false ->
- headers_regular_headers(Frame, State, Type, Stream, #{}, Headers);
- true ->
- stream_reset(StreamID, State, protocol_error,
- 'Trailer header blocks must not contain pseudo-headers. (RFC7540 8.1.2.1)')
+headers_process(Frame=#headers{id=StreamID, fin=IsFin},
+ State=#http2_machine{local_settings=LocalSettings},
+ Type, Stream, Headers0) ->
+ ReqMethod = case Stream of
+ #stream{method=ReqMethod0} -> ReqMethod0;
+ undefined -> undefined
+ end,
+ case cow_http:process_headers(Headers0, Type, ReqMethod, IsFin, LocalSettings) of
+ {headers, Headers, PseudoHeaders, Len} ->
+ headers_frame(Frame, State, Type, Stream, Headers, PseudoHeaders, Len);
+ {push_promise, Headers, PseudoHeaders} ->
+ push_promise_frame(Frame, State, Stream, Headers, PseudoHeaders);
+ {trailers, Headers} ->
+ trailers_frame(Frame, State, Stream, Headers);
+ {error, Reason} when Type =:= request ->
+ headers_malformed(Frame, State, format_error(Reason));
+ {error, Reason} ->
+ stream_reset(StreamID, State, protocol_error, format_error(Reason))
end.
headers_malformed(#headers{id=StreamID}, State, HumanReadable) ->
{error, {stream_error, StreamID, protocol_error, HumanReadable}, State}.
-request_pseudo_headers([{<<":method">>, _}|_], #{method := _}) ->
- {error, 'Multiple :method pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":method">>, Method}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{method => Method});
-request_pseudo_headers([{<<":scheme">>, _}|_], #{scheme := _}) ->
- {error, 'Multiple :scheme pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":scheme">>, Scheme}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{scheme => Scheme});
-request_pseudo_headers([{<<":authority">>, _}|_], #{authority := _}) ->
- {error, 'Multiple :authority pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":authority">>, Authority}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{authority => Authority});
-request_pseudo_headers([{<<":path">>, _}|_], #{path := _}) ->
- {error, 'Multiple :path pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":path">>, Path}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{path => Path});
-request_pseudo_headers([{<<":protocol">>, _}|_], #{protocol := _}) ->
- {error, 'Multiple :protocol pseudo-headers were found. (RFC7540 8.1.2.3)'};
-request_pseudo_headers([{<<":protocol">>, Protocol}|Tail], PseudoHeaders) ->
- request_pseudo_headers(Tail, PseudoHeaders#{protocol => Protocol});
-request_pseudo_headers([{<<":", _/bits>>, _}|_], _) ->
- {error, 'An unknown or invalid pseudo-header was found. (RFC7540 8.1.2.1)'};
-request_pseudo_headers(Headers, PseudoHeaders) ->
- {ok, PseudoHeaders, Headers}.
-
-response_pseudo_headers([{<<":status">>, _}|_], #{status := _}) ->
- {error, 'Multiple :status pseudo-headers were found. (RFC7540 8.1.2.3)'};
-response_pseudo_headers([{<<":status">>, Status}|Tail], PseudoHeaders) ->
- try cow_http:status_to_integer(Status) of
- IntStatus ->
- response_pseudo_headers(Tail, PseudoHeaders#{status => IntStatus})
- catch _:_ ->
- {error, 'The :status pseudo-header value is invalid. (RFC7540 8.1.2.4)'}
- end;
-response_pseudo_headers([{<<":", _/bits>>, _}|_], _) ->
- {error, 'An unknown or invalid pseudo-header was found. (RFC7540 8.1.2.1)'};
-response_pseudo_headers(Headers, PseudoHeaders) ->
- {ok, PseudoHeaders, Headers}.
-
-trailers_contain_pseudo_headers([]) ->
- false;
-trailers_contain_pseudo_headers([{<<":", _/bits>>, _}|_]) ->
- true;
-trailers_contain_pseudo_headers([_|Tail]) ->
- trailers_contain_pseudo_headers(Tail).
-
-%% Rejecting invalid regular headers might be a bit too strong for clients.
-headers_regular_headers(Frame=#headers{id=StreamID},
- State, Type, Stream, PseudoHeaders, Headers) ->
- case regular_headers(Headers, Type) of
- ok when Type =:= request ->
- request_expected_size(Frame, State, Type, Stream, PseudoHeaders, Headers);
- ok when Type =:= push_promise ->
- push_promise_frame(Frame, State, Stream, PseudoHeaders, Headers);
- ok when Type =:= response ->
- response_expected_size(Frame, State, Type, Stream, PseudoHeaders, Headers);
- ok when Type =:= trailers ->
- trailers_frame(Frame, State, Stream, Headers);
- {error, HumanReadable} when Type =:= request ->
- headers_malformed(Frame, State, HumanReadable);
- {error, HumanReadable} ->
- stream_reset(StreamID, State, protocol_error, HumanReadable)
- end.
-
-regular_headers([{<<>>, _}|_], _) ->
- {error, 'Empty header names are not valid regular headers. (CVE-2019-9516)'};
-regular_headers([{<<":", _/bits>>, _}|_], _) ->
- {error, 'Pseudo-headers were found after regular headers. (RFC7540 8.1.2.1)'};
-regular_headers([{<<"connection">>, _}|_], _) ->
- {error, 'The connection header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"keep-alive">>, _}|_], _) ->
- {error, 'The keep-alive header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"proxy-authenticate">>, _}|_], _) ->
- {error, 'The proxy-authenticate header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"proxy-authorization">>, _}|_], _) ->
- {error, 'The proxy-authorization header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"transfer-encoding">>, _}|_], _) ->
- {error, 'The transfer-encoding header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"upgrade">>, _}|_], _) ->
- {error, 'The upgrade header is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"te">>, Value}|_], request) when Value =/= <<"trailers">> ->
- {error, 'The te header with a value other than "trailers" is not allowed. (RFC7540 8.1.2.2)'};
-regular_headers([{<<"te">>, _}|_], Type) when Type =/= request ->
- {error, 'The te header is only allowed in request headers. (RFC7540 8.1.2.2)'};
-regular_headers([{Name, _}|Tail], Type) ->
- Pattern = [
- <<$A>>, <<$B>>, <<$C>>, <<$D>>, <<$E>>, <<$F>>, <<$G>>, <<$H>>, <<$I>>,
- <<$J>>, <<$K>>, <<$L>>, <<$M>>, <<$N>>, <<$O>>, <<$P>>, <<$Q>>, <<$R>>,
- <<$S>>, <<$T>>, <<$U>>, <<$V>>, <<$W>>, <<$X>>, <<$Y>>, <<$Z>>
- ],
- case binary:match(Name, Pattern) of
- nomatch -> regular_headers(Tail, Type);
- _ -> {error, 'Header names must be lowercase. (RFC7540 8.1.2)'}
- end;
-regular_headers([], _) ->
- ok.
-
-request_expected_size(Frame=#headers{fin=IsFin}, State, Type, Stream, PseudoHeaders, Headers) ->
- case [CL || {<<"content-length">>, CL} <- Headers] of
- [] when IsFin =:= fin ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [] ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, undefined);
- [<<"0">>] when IsFin =:= fin ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [_] when IsFin =:= fin ->
- headers_malformed(Frame, State,
- 'HEADERS frame with the END_STREAM flag contains a non-zero content-length. (RFC7540 8.1.2.6)');
- [BinLen] ->
- headers_parse_expected_size(Frame, State, Type, Stream,
- PseudoHeaders, Headers, BinLen);
- _ ->
- headers_malformed(Frame, State,
- 'Multiple content-length headers were received. (RFC7230 3.3.2)')
- end.
-
-response_expected_size(Frame=#headers{id=StreamID, fin=IsFin}, State, Type,
- Stream=#stream{method=Method}, PseudoHeaders=#{status := Status}, Headers) ->
- case [CL || {<<"content-length">>, CL} <- Headers] of
- [] when IsFin =:= fin ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [] ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, undefined);
- [_] when Status >= 100, Status =< 199 ->
- stream_reset(StreamID, State, protocol_error,
- 'Content-length header received in a 1xx response. (RFC7230 3.3.2)');
- [_] when Status =:= 204 ->
- stream_reset(StreamID, State, protocol_error,
- 'Content-length header received in a 204 response. (RFC7230 3.3.2)');
- [_] when Status >= 200, Status =< 299, Method =:= <<"CONNECT">> ->
- stream_reset(StreamID, State, protocol_error,
- 'Content-length header received in a 2xx response to a CONNECT request. (RFC7230 3.3.2).');
- %% Responses to HEAD requests, and 304 responses may contain
- %% a content-length header that must be ignored. (RFC7230 3.3.2)
- [_] when Method =:= <<"HEAD">> ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [_] when Status =:= 304 ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [<<"0">>] when IsFin =:= fin ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0);
- [_] when IsFin =:= fin ->
- stream_reset(StreamID, State, protocol_error,
- 'HEADERS frame with the END_STREAM flag contains a non-zero content-length. (RFC7540 8.1.2.6)');
- [BinLen] ->
- headers_parse_expected_size(Frame, State, Type, Stream,
- PseudoHeaders, Headers, BinLen);
- _ ->
- stream_reset(StreamID, State, protocol_error,
- 'Multiple content-length headers were received. (RFC7230 3.3.2)')
- end.
-
-headers_parse_expected_size(Frame=#headers{id=StreamID},
- State, Type, Stream, PseudoHeaders, Headers, BinLen) ->
- try cow_http_hd:parse_content_length(BinLen) of
- Len ->
- headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, Len)
- catch
- _:_ ->
- HumanReadable = 'The content-length header is invalid. (RFC7230 3.3.2)',
- case Type of
- request -> headers_malformed(Frame, State, HumanReadable);
- response -> stream_reset(StreamID, State, protocol_error, HumanReadable)
- end
- end.
+format_error(connect_invalid_pseudo_header) ->
+ 'CONNECT requests only use the :method and :authority pseudo-headers. (RFC7540 8.3)';
+format_error(connect_missing_authority) ->
+ 'CONNECT requests must include the :authority pseudo-header. (RFC7540 8.3)';
+format_error(empty_header_name) ->
+ 'Empty header names are not valid regular headers. (CVE-2019-9516)';
+format_error(extended_connect_missing_protocol) ->
+ 'The :protocol pseudo-header MUST be sent with an extended CONNECT. (RFC8441 4)';
+format_error(invalid_connection_header) ->
+ 'The connection header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_keep_alive_header) ->
+ 'The keep-alive header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_protocol_pseudo_header) ->
+ 'The :protocol pseudo-header is only defined for the extended CONNECT. (RFC8441 4)';
+format_error(invalid_proxy_authenticate_header) ->
+ 'The proxy-authenticate header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_proxy_authorization_header) ->
+ 'The proxy-authorization header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_pseudo_header) ->
+ 'An unknown or invalid pseudo-header was found. (RFC7540 8.1.2.1)';
+format_error(invalid_status_pseudo_header) ->
+ 'The :status pseudo-header value is invalid. (RFC7540 8.1.2.4)';
+format_error(invalid_te_header) ->
+ 'The te header is only allowed in request headers. (RFC7540 8.1.2.2)';
+format_error(invalid_te_value) ->
+ 'The te header with a value other than "trailers" is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_transfer_encoding_header) ->
+ 'The transfer-encoding header is not allowed. (RFC7540 8.1.2.2)';
+format_error(invalid_upgrade_header) ->
+ 'The upgrade header is not allowed. (RFC7540 8.1.2.2)';
+format_error(missing_pseudo_header) ->
+ 'A required pseudo-header was not found. (RFC7540 8.1.2.3, RFC7540 8.1.2.4)';
+format_error(multiple_authority_pseudo_headers) ->
+ 'Multiple :authority pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_method_pseudo_headers) ->
+ 'Multiple :method pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_path_pseudo_headers) ->
+ 'Multiple :path pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_protocol_pseudo_headers) ->
+ 'Multiple :protocol pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_scheme_pseudo_headers) ->
+ 'Multiple :scheme pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(multiple_status_pseudo_headers) ->
+ 'Multiple :status pseudo-headers were found. (RFC7540 8.1.2.3)';
+format_error(non_zero_length_with_fin_flag) ->
+ 'HEADERS frame with the END_STREAM flag contains a non-zero content-length. (RFC7540 8.1.2.6)';
+format_error(pseudo_header_after_regular) ->
+ 'Pseudo-headers were found after regular headers. (RFC7540 8.1.2.1)';
+format_error(trailer_invalid_pseudo_header) ->
+ 'Trailer header blocks must not contain pseudo-headers. (RFC7540 8.1.2.1)';
+format_error(uppercase_header_name) ->
+ 'Header names must be lowercase. (RFC7540 8.1.2)';
+format_error(Reason) ->
+ cow_http:format_semantic_error(Reason).
headers_frame(#headers{id=StreamID, fin=IsFin}, State0=#http2_machine{
local_settings=#{initial_window_size := RemoteWindow},
remote_settings=#{initial_window_size := LocalWindow}},
- Type, Stream0, PseudoHeaders, Headers, Len) ->
+ Type, Stream0, Headers, PseudoHeaders, Len) ->
{Stream, State1} = case Type of
request ->
TE = case lists:keyfind(<<"te">>, 1, Headers) of
@@ -817,7 +662,8 @@ trailers_frame(#headers{id=StreamID}, State0, Stream0, Headers) ->
%% PRIORITY frame.
%%
-%% @todo Handle PRIORITY frames.
+%% The PRIORITY mechanism is seen as flawed and deprecated.
+%% We will not implement it.
priority_frame(_Frame, State) ->
{ok, State}.
@@ -966,7 +812,7 @@ push_promise_frame(#headers{id=PromisedStreamID},
State0=#http2_machine{
local_settings=#{initial_window_size := RemoteWindow},
remote_settings=#{initial_window_size := LocalWindow}},
- #stream{id=StreamID}, PseudoHeaders=#{method := Method}, Headers) ->
+ #stream{id=StreamID}, Headers, PseudoHeaders=#{method := Method}) ->
TE = case lists:keyfind(<<"te">>, 1, Headers) of
{_, TE0} -> TE0;
false -> undefined
@@ -1049,32 +895,61 @@ unexpected_continuation_frame(#continuation{}, State) ->
continuation_frame(#continuation{id=StreamID, head=head_fin, data=HeaderFragment1},
State=#http2_machine{state={continuation, Type,
Frame=#headers{id=StreamID, data=HeaderFragment0}}}) ->
- HeaderData = <<HeaderFragment0/binary, HeaderFragment1/binary>>,
- headers_decode(Frame#headers{head=head_fin, data=HeaderData},
- State#http2_machine{state=normal}, Type, stream_get(StreamID, State));
+ case continuation_frame_append(HeaderFragment0, HeaderFragment1, State) of
+ {ok, HeaderData} ->
+ headers_decode(Frame#headers{head=head_fin, data=HeaderData},
+ State#http2_machine{state=normal}, Type, stream_get(StreamID, State));
+ Error ->
+ Error
+ end;
continuation_frame(#continuation{id=StreamID, head=head_fin, data=HeaderFragment1},
State=#http2_machine{state={continuation, Type, #push_promise{
id=StreamID, promised_id=PromisedStreamID, data=HeaderFragment0}}}) ->
- HeaderData = <<HeaderFragment0/binary, HeaderFragment1/binary>>,
- headers_decode(#headers{id=PromisedStreamID, fin=fin, head=head_fin, data=HeaderData},
- State#http2_machine{state=normal}, Type, undefined);
+ case continuation_frame_append(HeaderFragment0, HeaderFragment1, State) of
+ {ok, HeaderData} ->
+ headers_decode(#headers{id=PromisedStreamID, fin=fin,
+ head=head_fin, data=HeaderData},
+ State#http2_machine{state=normal}, Type, undefined);
+ Error ->
+ Error
+ end;
continuation_frame(#continuation{id=StreamID, data=HeaderFragment1},
- State=#http2_machine{state={continuation, Type, ContinuedFrame0}})
- when element(2, ContinuedFrame0) =:= StreamID ->
- ContinuedFrame = case ContinuedFrame0 of
+ State=#http2_machine{state={continuation, Type, ContinuedFrame}})
+ when element(2, ContinuedFrame) =:= StreamID ->
+ case ContinuedFrame of
#headers{data=HeaderFragment0} ->
- HeaderData = <<HeaderFragment0/binary, HeaderFragment1/binary>>,
- ContinuedFrame0#headers{data=HeaderData};
+ case continuation_frame_append(HeaderFragment0, HeaderFragment1, State) of
+ {ok, HeaderData} ->
+ {ok, State#http2_machine{state={continuation, Type,
+ ContinuedFrame#headers{data=HeaderData}}}};
+ Error ->
+ Error
+ end;
#push_promise{data=HeaderFragment0} ->
- HeaderData = <<HeaderFragment0/binary, HeaderFragment1/binary>>,
- ContinuedFrame0#push_promise{data=HeaderData}
- end,
- {ok, State#http2_machine{state={continuation, Type, ContinuedFrame}}};
+ case continuation_frame_append(HeaderFragment0, HeaderFragment1, State) of
+ {ok, HeaderData} ->
+ {ok, State#http2_machine{state={continuation, Type,
+ ContinuedFrame#push_promise{data=HeaderData}}}};
+ Error ->
+ Error
+ end
+ end;
continuation_frame(_F, State) ->
{error, {connection_error, protocol_error,
'An invalid frame was received in the middle of a header block. (RFC7540 6.2)'},
State}.
+continuation_frame_append(Fragment0, Fragment1, State=#http2_machine{opts=Opts}) ->
+ MaxSize = maps:get(max_fragmented_header_block_size, Opts, 32768),
+ case byte_size(Fragment0) + byte_size(Fragment1) =< MaxSize of
+ true ->
+ {ok, <<Fragment0/binary, Fragment1/binary>>};
+ false ->
+ {error, {connection_error, enhance_your_calm,
+			'The fragmented header block is larger than we are willing to accept.'},
+ State}
+ end.
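+
+%% A sketch of raising the limit, assuming the option is passed in
+%% the opts() map used to initialize the machine (the default used
+%% above is 32768 bytes):
+%%   Opts = #{max_fragmented_header_block_size => 65536}.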
+
%% Ignored frames.
-spec ignored_frame(State)
@@ -1111,9 +986,9 @@ timeout(_, _, State) ->
%% this module does not send data directly, instead it returns
%% a value that can then be used to send the frames.
--spec prepare_headers(cow_http2:streamid(), State, idle | cow_http2:fin(),
- pseudo_headers(), cow_http:headers())
- -> {ok, cow_http2:fin(), iodata(), State} when State::http2_machine().
+-spec prepare_headers(cow_http2:streamid(), State, idle | cow_http:fin(),
+ cow_http:pseudo_headers(), cow_http:headers())
+ -> {ok, cow_http:fin(), iodata(), State} when State::http2_machine().
prepare_headers(StreamID, State=#http2_machine{encode_state=EncodeState0},
IsFin0, PseudoHeaders, Headers0) ->
Stream = #stream{method=Method, local=idle} = stream_get(StreamID, State),
@@ -1122,12 +997,14 @@ prepare_headers(StreamID, State=#http2_machine{encode_state=EncodeState0},
{_, <<"HEAD">>} -> fin;
_ -> IsFin0
end,
- Headers = merge_pseudo_headers(PseudoHeaders, remove_http11_headers(Headers0)),
+ Headers = cow_http:merge_pseudo_headers(PseudoHeaders,
+ cow_http:remove_http1_headers(Headers0)),
{HeaderBlock, EncodeState} = cow_hpack:encode(Headers, EncodeState0),
{ok, IsFin, HeaderBlock, stream_store(Stream#stream{local=IsFin0},
State#http2_machine{encode_state=EncodeState})}.
--spec prepare_push_promise(cow_http2:streamid(), State, pseudo_headers(), cow_http:headers())
+-spec prepare_push_promise(cow_http2:streamid(), State,
+ cow_http:pseudo_headers(), cow_http:headers())
-> {ok, cow_http2:streamid(), iodata(), State}
| {error, no_push} when State::http2_machine().
prepare_push_promise(_, #http2_machine{remote_settings=#{enable_push := false}}, _, _) ->
@@ -1141,7 +1018,8 @@ prepare_push_promise(StreamID, State=#http2_machine{encode_state=EncodeState0,
{_, TE0} -> TE0;
false -> undefined
end,
- Headers = merge_pseudo_headers(PseudoHeaders, remove_http11_headers(Headers0)),
+ Headers = cow_http:merge_pseudo_headers(PseudoHeaders,
+ cow_http:remove_http1_headers(Headers0)),
{HeaderBlock, EncodeState} = cow_hpack:encode(Headers, EncodeState0),
{ok, LocalStreamID, HeaderBlock, stream_store(
#stream{id=LocalStreamID, method=maps:get(method, PseudoHeaders),
@@ -1149,34 +1027,6 @@ prepare_push_promise(StreamID, State=#http2_machine{encode_state=EncodeState0,
local_window=LocalWindow, remote_window=RemoteWindow, te=TE},
State#http2_machine{encode_state=EncodeState, local_streamid=LocalStreamID + 2})}.
-remove_http11_headers(Headers) ->
- RemoveHeaders0 = [
- <<"keep-alive">>,
- <<"proxy-connection">>,
- <<"transfer-encoding">>,
- <<"upgrade">>
- ],
- RemoveHeaders = case lists:keyfind(<<"connection">>, 1, Headers) of
- false ->
- RemoveHeaders0;
- {_, ConnHd} ->
- %% We do not need to worry about any "close" header because
- %% that header name is reserved.
- Connection = cow_http_hd:parse_connection(ConnHd),
- Connection ++ [<<"connection">>|RemoveHeaders0]
- end,
- lists:filter(fun({Name, _}) ->
- not lists:member(Name, RemoveHeaders)
- end, Headers).
-
-merge_pseudo_headers(PseudoHeaders, Headers0) ->
- lists:foldl(fun
- ({status, Status}, Acc) when is_integer(Status) ->
- [{<<":status">>, integer_to_binary(Status)}|Acc];
- ({Name, Value}, Acc) ->
- [{iolist_to_binary([$:, atom_to_binary(Name, latin1)]), Value}|Acc]
- end, Headers0, maps:to_list(PseudoHeaders)).
-
-spec prepare_trailers(cow_http2:streamid(), State, cow_http:headers())
-> {ok, iodata(), State} when State::http2_machine().
prepare_trailers(StreamID, State=#http2_machine{encode_state=EncodeState0}, Trailers) ->
@@ -1185,9 +1035,9 @@ prepare_trailers(StreamID, State=#http2_machine{encode_state=EncodeState0}, Trai
{ok, HeaderBlock, stream_store(Stream#stream{local=fin},
State#http2_machine{encode_state=EncodeState})}.
--spec send_or_queue_data(cow_http2:streamid(), State, cow_http2:fin(), DataOrFileOrTrailers)
+-spec send_or_queue_data(cow_http2:streamid(), State, cow_http:fin(), DataOrFileOrTrailers)
-> {ok, State}
- | {send, [{cow_http2:streamid(), cow_http2:fin(), [DataOrFileOrTrailers]}], State}
+ | {send, [{cow_http2:streamid(), cow_http:fin(), [DataOrFileOrTrailers]}], State}
when State::http2_machine(), DataOrFileOrTrailers::
{data, iodata()} | #sendfile{} | {trailers, cow_http:headers()}.
send_or_queue_data(StreamID, State0=#http2_machine{opts=Opts, local_window=ConnWindow},
@@ -1242,8 +1092,8 @@ send_or_queue_data(StreamID, State0=#http2_machine{opts=Opts, local_window=ConnW
%% Internal data sending/queuing functions.
-%% @todo Should we ever want to implement the PRIORITY mechanism,
-%% this would be the place to do it. Right now, we just go over
+%% The PRIORITY mechanism is seen as flawed and deprecated.
+%% We will not implement it. Instead we simply go over
%% all streams and send what we can until either everything is
%% sent or we run out of space in the window.
send_data(State0=#http2_machine{streams=Streams0}) ->
@@ -1577,7 +1427,7 @@ get_stream_local_buffer_size(StreamID, State=#http2_machine{mode=Mode,
%% Retrieve the local state for a stream, including the state in the queue.
-spec get_stream_local_state(cow_http2:streamid(), http2_machine())
- -> {ok, idle | cow_http2:fin(), empty | nofin | fin} | {error, not_found | closed}.
+ -> {ok, idle | cow_http:fin(), empty | nofin | fin} | {error, not_found | closed}.
get_stream_local_state(StreamID, State=#http2_machine{mode=Mode,
local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) ->
case stream_get(StreamID, State) of
@@ -1600,7 +1450,7 @@ get_stream_local_state(StreamID, State=#http2_machine{mode=Mode,
%% Retrieve the remote state for a stream.
-spec get_stream_remote_state(cow_http2:streamid(), http2_machine())
- -> {ok, idle | cow_http2:fin()} | {error, not_found | closed}.
+ -> {ok, idle | cow_http:fin()} | {error, not_found | closed}.
get_stream_remote_state(StreamID, State=#http2_machine{mode=Mode,
local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) ->
case stream_get(StreamID, State) of
diff --git a/src/cow_http3.erl b/src/cow_http3.erl
new file mode 100644
index 0000000..d3776ec
--- /dev/null
+++ b/src/cow_http3.erl
@@ -0,0 +1,458 @@
+%% Copyright (c) 2023-2024, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http3).
+
+%% Parsing.
+-export([parse/1]).
+-export([parse_unidi_stream_header/1]).
+-export([code_to_error/1]).
+
+%% Building.
+-export([data/1]).
+-export([headers/1]).
+-export([settings/1]).
+-export([error_to_code/1]).
+-export([encode_int/1]).
+
+-type stream_id() :: non_neg_integer().
+-export_type([stream_id/0]).
+
+-type push_id() :: non_neg_integer().
+-export_type([push_id/0]).
+
+-type settings() :: #{
+ qpack_max_table_capacity => 0..16#3fffffffffffffff,
+ max_field_section_size => 0..16#3fffffffffffffff,
+ qpack_blocked_streams => 0..16#3fffffffffffffff,
+ enable_connect_protocol => boolean()
+}.
+-export_type([settings/0]).
+
+-type error() :: h3_no_error
+ | h3_general_protocol_error
+ | h3_internal_error
+ | h3_stream_creation_error
+ | h3_closed_critical_stream
+ | h3_frame_unexpected
+ | h3_frame_error
+ | h3_excessive_load
+ | h3_id_error
+ | h3_settings_error
+ | h3_missing_settings
+ | h3_request_rejected
+ | h3_request_cancelled
+ | h3_request_incomplete
+ | h3_message_error
+ | h3_connect_error
+ | h3_version_fallback.
+-export_type([error/0]).
+
+-type frame() :: {data, binary()}
+ | {headers, binary()}
+ | {cancel_push, push_id()}
+ | {settings, settings()}
+ | {push_promise, push_id(), binary()}
+ | {goaway, stream_id() | push_id()}
+ | {max_push_id, push_id()}.
+-export_type([frame/0]).
+
+%% Parsing.
+
+-spec parse(binary())
+ -> {ok, frame(), binary()}
+ | {more, {data, binary()} | ignore, non_neg_integer()}
+ | {ignore, binary()}
+ | {connection_error, h3_frame_error | h3_frame_unexpected | h3_settings_error, atom()}
+ | more.
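+%%
+%% For example, a complete 5-byte DATA frame parses in one pass,
+%% with any leftover bytes returned as the last element:
+%%   {ok, {data, <<"hello">>}, <<>>} = parse(<<0, 0:2, 5:6, "hello">>).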
+
+%%
+%% DATA frames.
+%%
+parse(<<0, 0:2, Len:6, Data:Len/binary, Rest/bits>>) ->
+ {ok, {data, Data}, Rest};
+parse(<<0, 1:2, Len:14, Data:Len/binary, Rest/bits>>) ->
+ {ok, {data, Data}, Rest};
+parse(<<0, 2:2, Len:30, Data:Len/binary, Rest/bits>>) ->
+ {ok, {data, Data}, Rest};
+parse(<<0, 3:2, Len:62, Data:Len/binary, Rest/bits>>) ->
+ {ok, {data, Data}, Rest};
+%% DATA frames may be split over multiple QUIC packets
+%% but we want to process them immediately rather than
+%% risk buffering a very large payload.
+parse(<<0, 0:2, Len:6, Data/bits>>) when byte_size(Data) < Len ->
+ {more, {data, Data}, Len - byte_size(Data)};
+parse(<<0, 1:2, Len:14, Data/bits>>) when byte_size(Data) < Len ->
+ {more, {data, Data}, Len - byte_size(Data)};
+parse(<<0, 2:2, Len:30, Data/bits>>) when byte_size(Data) < Len ->
+ {more, {data, Data}, Len - byte_size(Data)};
+parse(<<0, 3:2, Len:62, Data/bits>>) when byte_size(Data) < Len ->
+ {more, {data, Data}, Len - byte_size(Data)};
+%%
+%% HEADERS frames.
+%%
+parse(<<1, 0:2, 0:6, _/bits>>) ->
+	{connection_error, h3_frame_error,
+		'A HEADERS frame payload CANNOT be 0 bytes wide. (RFC9114 7.1, RFC9114 7.2.2)'};
+parse(<<1, 1:2, 0:14, _/bits>>) ->
+	{connection_error, h3_frame_error,
+		'A HEADERS frame payload CANNOT be 0 bytes wide. (RFC9114 7.1, RFC9114 7.2.2)'};
+parse(<<1, 2:2, 0:30, _/bits>>) ->
+	{connection_error, h3_frame_error,
+		'A HEADERS frame payload CANNOT be 0 bytes wide. (RFC9114 7.1, RFC9114 7.2.2)'};
+parse(<<1, 3:2, 0:62, _/bits>>) ->
+	{connection_error, h3_frame_error,
+		'A HEADERS frame payload CANNOT be 0 bytes wide. (RFC9114 7.1, RFC9114 7.2.2)'};
+parse(<<1, 0:2, Len:6, EncodedFieldSection:Len/binary, Rest/bits>>) ->
+ {ok, {headers, EncodedFieldSection}, Rest};
+parse(<<1, 1:2, Len:14, EncodedFieldSection:Len/binary, Rest/bits>>) ->
+ {ok, {headers, EncodedFieldSection}, Rest};
+parse(<<1, 2:2, Len:30, EncodedFieldSection:Len/binary, Rest/bits>>) ->
+ {ok, {headers, EncodedFieldSection}, Rest};
+parse(<<1, 3:2, Len:62, EncodedFieldSection:Len/binary, Rest/bits>>) ->
+ {ok, {headers, EncodedFieldSection}, Rest};
+%%
+%% CANCEL_PUSH frames.
+%%
+parse(<<3, 0:2, 1:6, 0:2, PushID:6, Rest/bits>>) ->
+ {ok, {cancel_push, PushID}, Rest};
+parse(<<3, 0:2, 2:6, 1:2, PushID:14, Rest/bits>>) ->
+ {ok, {cancel_push, PushID}, Rest};
+parse(<<3, 0:2, 4:6, 2:2, PushID:30, Rest/bits>>) ->
+ {ok, {cancel_push, PushID}, Rest};
+parse(<<3, 0:2, 8:6, 3:2, PushID:62, Rest/bits>>) ->
+ {ok, {cancel_push, PushID}, Rest};
+parse(<<3, _/bits>>) ->
+ {connection_error, h3_frame_error,
+		'A CANCEL_PUSH frame payload MUST be 1, 2, 4 or 8 bytes wide. (RFC9114 7.1, RFC9114 7.2.3)'};
+%%
+%% SETTINGS frames.
+%%
+parse(<<4, 0:2, Len:6, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_settings_id(Rest, Len, #{});
+parse(<<4, 1:2, Len:14, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_settings_id(Rest, Len, #{});
+parse(<<4, 2:2, Len:30, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_settings_id(Rest, Len, #{});
+parse(<<4, 3:2, Len:62, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_settings_id(Rest, Len, #{});
+%%
+%% PUSH_PROMISE frames.
+%%
+parse(<<5, 0:2, Len:6, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_push_promise(Rest, Len);
+parse(<<5, 1:2, Len:14, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_push_promise(Rest, Len);
+parse(<<5, 2:2, Len:30, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_push_promise(Rest, Len);
+parse(<<5, 3:2, Len:62, Rest/bits>>) when byte_size(Rest) >= Len ->
+ parse_push_promise(Rest, Len);
+%%
+%% GOAWAY frames.
+%%
+parse(<<7, 0:2, 1:6, 0:2, StreamOrPushID:6, Rest/bits>>) ->
+ {ok, {goaway, StreamOrPushID}, Rest};
+parse(<<7, 0:2, 2:6, 1:2, StreamOrPushID:14, Rest/bits>>) ->
+ {ok, {goaway, StreamOrPushID}, Rest};
+parse(<<7, 0:2, 4:6, 2:2, StreamOrPushID:30, Rest/bits>>) ->
+ {ok, {goaway, StreamOrPushID}, Rest};
+parse(<<7, 0:2, 8:6, 3:2, StreamOrPushID:62, Rest/bits>>) ->
+ {ok, {goaway, StreamOrPushID}, Rest};
+parse(<<7, 0:2, N:6, _/bits>>) when N =:= 1; N =:= 2; N =:= 4; N =:= 8 ->
+ more;
+parse(<<7, _/bits>>) ->
+ {connection_error, h3_frame_error,
+		'A GOAWAY frame payload MUST be 1, 2, 4 or 8 bytes wide. (RFC9114 7.1, RFC9114 7.2.6)'};
+%%
+%% MAX_PUSH_ID frames.
+%%
+parse(<<13, 0:2, 1:6, 0:2, PushID:6, Rest/bits>>) ->
+ {ok, {max_push_id, PushID}, Rest};
+parse(<<13, 0:2, 2:6, 1:2, PushID:14, Rest/bits>>) ->
+ {ok, {max_push_id, PushID}, Rest};
+parse(<<13, 0:2, 4:6, 2:2, PushID:30, Rest/bits>>) ->
+ {ok, {max_push_id, PushID}, Rest};
+parse(<<13, 0:2, 8:6, 3:2, PushID:62, Rest/bits>>) ->
+ {ok, {max_push_id, PushID}, Rest};
+parse(<<13, 0:2, N:6, _/bits>>) when N =:= 1; N =:= 2; N =:= 4; N =:= 8 ->
+ more;
+parse(<<13, _/bits>>) ->
+ {connection_error, h3_frame_error,
+		'A MAX_PUSH_ID frame payload MUST be 1, 2, 4 or 8 bytes wide. (RFC9114 7.1, RFC9114 7.2.7)'};
+%%
+%% HTTP/2 frame types must be rejected.
+%%
+parse(<<2, _/bits>>) ->
+	{connection_error, h3_frame_unexpected,
+		'The HTTP/2 PRIORITY frame is not defined for HTTP/3 and must be rejected. (RFC9114 7.2.8)'};
+parse(<<6, _/bits>>) ->
+	{connection_error, h3_frame_unexpected,
+		'The HTTP/2 PING frame is not defined for HTTP/3 and must be rejected. (RFC9114 7.2.8)'};
+parse(<<8, _/bits>>) ->
+	{connection_error, h3_frame_unexpected,
+		'The HTTP/2 WINDOW_UPDATE frame is not defined for HTTP/3 and must be rejected. (RFC9114 7.2.8)'};
+parse(<<9, _/bits>>) ->
+	{connection_error, h3_frame_unexpected,
+		'The HTTP/2 CONTINUATION frame is not defined for HTTP/3 and must be rejected. (RFC9114 7.2.8)'};
+%%
+%% Unknown frames must be ignored.
+%%
+parse(<<0:2, Type:6, 0:2, Len:6, Rest/bits>>)
+ when Type =:= 10; Type =:= 11; Type =:= 12; Type > 13 ->
+ parse_ignore(Rest, Len);
+parse(<<0:2, Type:6, 1:2, Len:14, Rest/bits>>)
+ when Type =:= 10; Type =:= 11; Type =:= 12; Type > 13 ->
+ parse_ignore(Rest, Len);
+parse(<<0:2, Type:6, 2:2, Len:30, Rest/bits>>)
+ when Type =:= 10; Type =:= 11; Type =:= 12; Type > 13 ->
+ parse_ignore(Rest, Len);
+parse(<<0:2, Type:6, 3:2, Len:62, Rest/bits>>)
+ when Type =:= 10; Type =:= 11; Type =:= 12; Type > 13 ->
+ parse_ignore(Rest, Len);
+parse(<<1:2, _:14, 0:2, Len:6, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<1:2, _:14, 1:2, Len:14, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<1:2, _:14, 2:2, Len:30, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<1:2, _:14, 3:2, Len:62, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<2:2, _:30, 0:2, Len:6, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<2:2, _:30, 1:2, Len:14, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<2:2, _:30, 2:2, Len:30, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<2:2, _:30, 3:2, Len:62, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<3:2, _:62, 0:2, Len:6, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<3:2, _:62, 1:2, Len:14, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<3:2, _:62, 2:2, Len:30, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+parse(<<3:2, _:62, 3:2, Len:62, Rest/bits>>) ->
+ parse_ignore(Rest, Len);
+%%
+%% Incomplete frames, for the frame types that we only process in full.
+%%
+parse(_) ->
+ more.
+
+parse_settings_id(Rest, 0, Settings) ->
+ {ok, {settings, Settings}, Rest};
+parse_settings_id(<<0:2, Identifier:6, Rest/bits>>, Len, Settings) when Len >= 1 ->
+ parse_settings_val(Rest, Len - 1, Settings, Identifier);
+parse_settings_id(<<1:2, Identifier:14, Rest/bits>>, Len, Settings) when Len >= 2 ->
+ parse_settings_val(Rest, Len - 2, Settings, Identifier);
+parse_settings_id(<<2:2, Identifier:30, Rest/bits>>, Len, Settings) when Len >= 4 ->
+ parse_settings_val(Rest, Len - 4, Settings, Identifier);
+parse_settings_id(<<3:2, Identifier:62, Rest/bits>>, Len, Settings) when Len >= 8 ->
+ parse_settings_val(Rest, Len - 8, Settings, Identifier);
+parse_settings_id(_, _, _) ->
+ {connection_error, h3_frame_error,
+ 'SETTINGS payload size exceeds the length given. (RFC9114 7.1, RFC9114 7.2.4)'}.
+
+parse_settings_val(<<0:2, Value:6, Rest/bits>>, Len, Settings, Identifier) when Len >= 1 ->
+ parse_settings_id_val(Rest, Len - 1, Settings, Identifier, Value);
+parse_settings_val(<<1:2, Value:14, Rest/bits>>, Len, Settings, Identifier) when Len >= 2 ->
+ parse_settings_id_val(Rest, Len - 2, Settings, Identifier, Value);
+parse_settings_val(<<2:2, Value:30, Rest/bits>>, Len, Settings, Identifier) when Len >= 4 ->
+ parse_settings_id_val(Rest, Len - 4, Settings, Identifier, Value);
+parse_settings_val(<<3:2, Value:62, Rest/bits>>, Len, Settings, Identifier) when Len >= 8 ->
+ parse_settings_id_val(Rest, Len - 8, Settings, Identifier, Value);
+parse_settings_val(_, _, _, _) ->
+ {connection_error, h3_frame_error,
+ 'SETTINGS payload size exceeds the length given. (RFC9114 7.1, RFC9114 7.2.4)'}.
+
+parse_settings_id_val(Rest, Len, Settings, Identifier, Value) ->
+ case Identifier of
+ %% SETTINGS_QPACK_MAX_TABLE_CAPACITY (RFC9204).
+ 1 ->
+ parse_settings_key_val(Rest, Len, Settings, qpack_max_table_capacity, Value);
+ %% SETTINGS_MAX_FIELD_SECTION_SIZE (RFC9114).
+ 6 ->
+ parse_settings_key_val(Rest, Len, Settings, max_field_section_size, Value);
+ %% SETTINGS_QPACK_BLOCKED_STREAMS (RFC9204).
+ 7 ->
+ parse_settings_key_val(Rest, Len, Settings, qpack_blocked_streams, Value);
+ %% SETTINGS_ENABLE_CONNECT_PROTOCOL (RFC9220).
+ 8 when Value =:= 0 ->
+ parse_settings_key_val(Rest, Len, Settings, enable_connect_protocol, false);
+ 8 when Value =:= 1 ->
+ parse_settings_key_val(Rest, Len, Settings, enable_connect_protocol, true);
+ 8 ->
+ {connection_error, h3_settings_error,
+ 'The SETTINGS_ENABLE_CONNECT_PROTOCOL value MUST be 0 or 1. (RFC9220 3, RFC8441 3)'};
+ _ when Identifier < 6 ->
+ {connection_error, h3_settings_error,
+				'HTTP/2 settings not defined for HTTP/3 must be rejected. (RFC9114 7.2.4.1)'};
+ %% Unknown settings must be ignored.
+ _ ->
+ parse_settings_id(Rest, Len, Settings)
+ end.
+
+parse_settings_key_val(Rest, Len, Settings, Key, Value) ->
+ case Settings of
+ #{Key := _} ->
+ {connection_error, h3_settings_error,
+ 'A duplicate setting identifier was found. (RFC9114 7.2.4)'};
+ _ ->
+ parse_settings_id(Rest, Len, Settings#{Key => Value})
+ end.
+
+parse_push_promise(<<0:2, PushID:6, Data/bits>>, Len) ->
+ <<EncodedFieldSection:(Len - 1)/bytes, Rest/bits>> = Data,
+ {ok, {push_promise, PushID, EncodedFieldSection}, Rest};
+parse_push_promise(<<1:2, PushID:14, Data/bits>>, Len) ->
+ <<EncodedFieldSection:(Len - 2)/bytes, Rest/bits>> = Data,
+ {ok, {push_promise, PushID, EncodedFieldSection}, Rest};
+parse_push_promise(<<2:2, PushID:30, Data/bits>>, Len) ->
+ <<EncodedFieldSection:(Len - 4)/bytes, Rest/bits>> = Data,
+ {ok, {push_promise, PushID, EncodedFieldSection}, Rest};
+parse_push_promise(<<3:2, PushID:62, Data/bits>>, Len) ->
+ <<EncodedFieldSection:(Len - 8)/bytes, Rest/bits>> = Data,
+ {ok, {push_promise, PushID, EncodedFieldSection}, Rest}.
+
+%% Large ignored frames could lead to DoS. Users of
+%% this module must limit the size of such frames.
+parse_ignore(Data, Len) ->
+ case Data of
+ <<_:Len/binary, Rest/bits>> ->
+ {ignore, Rest};
+ _ ->
+ {more, ignore, Len - byte_size(Data)}
+ end.
+
+-spec parse_unidi_stream_header(binary())
+ -> {ok, control | push | encoder | decoder, binary()}
+ | {undefined, binary()}.
+
+parse_unidi_stream_header(<<0, Rest/bits>>) ->
+ {ok, control, Rest};
+parse_unidi_stream_header(<<1, Rest/bits>>) ->
+ {ok, push, Rest};
+parse_unidi_stream_header(<<2, Rest/bits>>) ->
+ {ok, encoder, Rest};
+parse_unidi_stream_header(<<3, Rest/bits>>) ->
+ {ok, decoder, Rest};
+parse_unidi_stream_header(<<0:2, _:6, Rest/bits>>) ->
+ {undefined, Rest};
+parse_unidi_stream_header(<<1:2, _:14, Rest/bits>>) ->
+ {undefined, Rest};
+parse_unidi_stream_header(<<2:2, _:30, Rest/bits>>) ->
+ {undefined, Rest};
+parse_unidi_stream_header(<<3:2, _:62, Rest/bits>>) ->
+ {undefined, Rest}.
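+
+%% For example: {ok, control, <<>>} = parse_unidi_stream_header(<<0>>),
+%% while an unknown stream type (here the reserved type 16#21) is
+%% skipped over: {undefined, <<>>} = parse_unidi_stream_header(<<16#21>>).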
+
+-spec code_to_error(non_neg_integer()) -> error().
+
+code_to_error(16#0100) -> h3_no_error;
+code_to_error(16#0101) -> h3_general_protocol_error;
+code_to_error(16#0102) -> h3_internal_error;
+code_to_error(16#0103) -> h3_stream_creation_error;
+code_to_error(16#0104) -> h3_closed_critical_stream;
+code_to_error(16#0105) -> h3_frame_unexpected;
+code_to_error(16#0106) -> h3_frame_error;
+code_to_error(16#0107) -> h3_excessive_load;
+code_to_error(16#0108) -> h3_id_error;
+code_to_error(16#0109) -> h3_settings_error;
+code_to_error(16#010a) -> h3_missing_settings;
+code_to_error(16#010b) -> h3_request_rejected;
+code_to_error(16#010c) -> h3_request_cancelled;
+code_to_error(16#010d) -> h3_request_incomplete;
+code_to_error(16#010e) -> h3_message_error;
+code_to_error(16#010f) -> h3_connect_error;
+code_to_error(16#0110) -> h3_version_fallback;
+%% Unknown/reserved error codes must be treated
+%% as equivalent to H3_NO_ERROR.
+code_to_error(_) -> h3_no_error.
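+
+%% For example, the reserved (GREASE) error code 16#21 is unknown
+%% here and therefore maps to h3_no_error:
+%%   h3_no_error = code_to_error(16#21).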
+
+%% Building.
+
+-spec data(iodata()) -> iolist().
+
+data(Data) ->
+ Len = encode_int(iolist_size(Data)),
+ [<<0:8>>, Len, Data].
+
+-spec headers(iodata()) -> iolist().
+
+headers(HeaderBlock) ->
+ Len = encode_int(iolist_size(HeaderBlock)),
+ [<<1:8>>, Len, HeaderBlock].
+
+-spec settings(settings()) -> iolist().
+
+settings(Settings) when Settings =:= #{} ->
+ <<4:8, 0:8>>;
+settings(Settings) ->
+ Payload = settings_payload(Settings),
+ Len = encode_int(iolist_size(Payload)),
+ [<<4:8>>, Len, Payload].
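+
+%% For example, an empty settings map builds a two-byte frame,
+%% type 4 followed by a length of 0:
+%%   <<4, 0>> = iolist_to_binary(settings(#{})).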
+
+settings_payload(Settings) ->
+ Payload = [case Key of
+ %% SETTINGS_QPACK_MAX_TABLE_CAPACITY (RFC9204).
+ qpack_max_table_capacity when Value =:= 0 -> <<>>;
+ qpack_max_table_capacity -> [encode_int(1), encode_int(Value)];
+ %% SETTINGS_MAX_FIELD_SECTION_SIZE (RFC9114).
+		max_field_section_size when Value =:= infinity -> <<>>;
+		max_field_section_size -> [encode_int(6), encode_int(Value)];
+ %% SETTINGS_QPACK_BLOCKED_STREAMS (RFC9204).
+ qpack_blocked_streams when Value =:= 0 -> <<>>;
+		qpack_blocked_streams -> [encode_int(7), encode_int(Value)];
+ %% SETTINGS_ENABLE_CONNECT_PROTOCOL (RFC9220).
+ enable_connect_protocol when Value -> [encode_int(8), encode_int(1)];
+ enable_connect_protocol -> [encode_int(8), encode_int(0)]
+ end || {Key, Value} <- maps:to_list(Settings)],
+	%% Include one reserved (GREASE) identifier in addition. Reserved
+	%% settings identifiers have the form 0x1f * N + 0x21 and exercise
+	%% the requirement that unknown identifiers be ignored. (RFC9114 7.2.4.1)
+ ReservedType = 16#1f * (rand:uniform(148764065110560900) - 1) + 16#21,
+ [encode_int(ReservedType), encode_int(rand:uniform(15384) - 1)|Payload].
+
+-spec error_to_code(error()) -> non_neg_integer().
+
+error_to_code(h3_no_error) ->
+ %% Implementations should select a reserved error code
+ %% with some probability when they would have sent H3_NO_ERROR. (RFC9114 8.1)
+ case rand:uniform(2) of
+ 1 -> 16#0100;
+ 2 -> 16#1f * (rand:uniform(148764065110560900) - 1) + 16#21
+ end;
+error_to_code(h3_general_protocol_error) -> 16#0101;
+error_to_code(h3_internal_error) -> 16#0102;
+error_to_code(h3_stream_creation_error) -> 16#0103;
+error_to_code(h3_closed_critical_stream) -> 16#0104;
+error_to_code(h3_frame_unexpected) -> 16#0105;
+error_to_code(h3_frame_error) -> 16#0106;
+error_to_code(h3_excessive_load) -> 16#0107;
+error_to_code(h3_id_error) -> 16#0108;
+error_to_code(h3_settings_error) -> 16#0109;
+error_to_code(h3_missing_settings) -> 16#010a;
+error_to_code(h3_request_rejected) -> 16#010b;
+error_to_code(h3_request_cancelled) -> 16#010c;
+error_to_code(h3_request_incomplete) -> 16#010d;
+error_to_code(h3_message_error) -> 16#010e;
+error_to_code(h3_connect_error) -> 16#010f;
+error_to_code(h3_version_fallback) -> 16#0110.
+
+-spec encode_int(0..16#3fffffffffffffff) -> binary().
+
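+%% The two high bits of the first byte select the width. The
+%% variable-length integer examples from RFC9000 Appendix A
+%% encode as expected:
+%%   <<16#25>> = encode_int(37),
+%%   <<16#7b, 16#bd>> = encode_int(15293),
+%%   <<16#9d, 16#7f, 16#3e, 16#7d>> = encode_int(494878333).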
+encode_int(I) when I < 64 ->
+ <<0:2, I:6>>;
+encode_int(I) when I < 16384 ->
+ <<1:2, I:14>>;
+encode_int(I) when I < 1073741824 ->
+ <<2:2, I:30>>;
+encode_int(I) when I < 4611686018427387904 ->
+ <<3:2, I:62>>.
diff --git a/src/cow_http3_machine.erl b/src/cow_http3_machine.erl
new file mode 100644
index 0000000..b1b4a68
--- /dev/null
+++ b/src/cow_http3_machine.erl
@@ -0,0 +1,721 @@
+%% Copyright (c) 2023-2024, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http3_machine).
+
+-export([init/2]).
+-export([init_unidi_local_streams/4]).
+-export([init_unidi_stream/3]).
+-export([set_unidi_remote_stream_type/3]).
+-export([init_bidi_stream/2]).
+-export([init_bidi_stream/3]).
+-export([close_bidi_stream_for_sending/2]).
+-export([close_stream/2]).
+-export([unidi_data/4]).
+-export([frame/4]).
+-export([ignored_frame/2]).
+-export([prepare_headers/5]).
+-export([prepare_trailers/3]).
+-export([reset_stream/2]).
+-export([get_bidi_stream_local_state/2]).
+-export([get_bidi_stream_remote_state/2]).
+
+-type opts() :: #{
+ enable_connect_protocol => boolean(),
+ max_decode_blocked_streams => 0..16#3fffffffffffffff,
+ max_decode_table_size => 0..16#3fffffffffffffff,
+ max_encode_blocked_streams => 0..16#3fffffffffffffff,
+ max_encode_table_size => 0..16#3fffffffffffffff
+}.
+-export_type([opts/0]).
+
+-type unidi_stream_dir() :: unidi_local | unidi_remote.
+-type unidi_stream_type() :: control | push | encoder | decoder.
+
+-record(unidi_stream, {
+ id :: cow_http3:stream_id(),
+
+ %% Unidi stream direction (local = we initiated).
+ dir :: unidi_stream_dir(),
+
+ %% Unidi stream type.
+ type :: undefined | unidi_stream_type()
+}).
+
+-record(bidi_stream, {
+ id :: cow_http3:stream_id(),
+
+ %% Request method.
+ method = undefined :: undefined | binary(),
+
+ %% Whether we finished sending data.
+ local = idle :: idle | cow_http:fin(),
+
+ %% Whether we finished receiving data.
+ remote = idle :: idle | cow_http:fin(),
+
+ %% Size expected and read from the request body.
+ remote_expected_size = undefined :: undefined | non_neg_integer(),
+ remote_read_size = 0 :: non_neg_integer(),
+
+ %% Unparsed te header. Used to know if we can send trailers.
+ %% Note that we can always send trailers to the server.
+ te :: undefined | binary()
+}).
+
+-type stream() :: #unidi_stream{} | #bidi_stream{}.
+
+-record(http3_machine, {
+ %% Whether the HTTP/3 endpoint is a client or a server.
+ mode :: client | server,
+
+ %% Current state of the supported unidi streams:
+ %% * the control stream must send SETTINGS as its first frame
+ %% * none of these streams can be closed once they are open
+ peer_control_state = no_stream :: no_stream | no_settings | ready,
+ peer_decode_state = no_stream :: no_stream | ready,
+ peer_encode_state = no_stream :: no_stream | ready,
+
+ %% Maximum Push ID.
+ max_push_id = -1 :: -1 | cow_http3:push_id(),
+
+ %% Settings are separate for each endpoint. They are sent once
+ %% at the beginning of the control stream.
+ local_settings = #{
+%	enable_connect_protocol => false,
+% max_decode_blocked_streams => 0,
+% max_decode_table_size => 0,
+% max_encode_blocked_streams => 0,
+% max_encode_table_size => 4096
+ } :: map(),
+
+ %% Currently active HTTP/3 streams. Streams may be initiated either
+ %% by the client or by the server through PUSH_PROMISE frames.
+ streams = #{} :: #{cow_http3:stream_id() => stream()},
+
+ %% QPACK decoding and encoding state.
+ decode_state :: cow_qpack:state(),
+ encode_state :: cow_qpack:state()
+}).
+
+-opaque http3_machine() :: #http3_machine{}.
+-export_type([http3_machine/0]).
+
+-type instructions() :: undefined
+ | {decoder_instructions | encoder_instructions, iodata()}.
+
+-spec init(client | server, opts())
+ -> {ok, iolist(), http3_machine()}.
+
+init(Mode, Opts) ->
+ Settings = init_settings(Opts),
+ {ok, cow_http3:settings(Settings), #http3_machine{
+ mode=Mode, local_settings=Settings,
+ decode_state=init_decode_state(Opts),
+ encode_state=init_encode_state(Opts)
+ }}.
+
+init_settings(Opts) ->
+ S0 = setting_from_opt(#{}, Opts, max_decode_table_size,
+ qpack_max_table_capacity, 0),
+ S1 = setting_from_opt(S0, Opts, max_decode_blocked_streams,
+ qpack_blocked_streams, 0),
+ %% @todo max_field_section_size
+ setting_from_opt(S1, Opts, enable_connect_protocol,
+ enable_connect_protocol, false).
+
+setting_from_opt(Settings, Opts, OptName, SettingName, Default) ->
+ case maps:get(OptName, Opts, Default) of
+ Default -> Settings;
+ Value -> Settings#{SettingName => Value}
+ end.
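+
+%% For example, an opts() map containing max_decode_table_size => 4096
+%% produces a settings map containing qpack_max_table_capacity => 4096,
+%% while options left at their default value are omitted from the
+%% SETTINGS frame entirely.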
+
+%% Note that only the decoder's values are advertised to the peer, via SETTINGS.
+init_decode_state(Opts) ->
+ MaxTableCapacity = maps:get(max_decode_table_size, Opts, 0),
+ MaxBlockedStreams = maps:get(max_decode_blocked_streams, Opts, 0),
+ cow_qpack:init(decoder, MaxTableCapacity, MaxBlockedStreams).
+
+%% We want to use the dynamic table by default to improve
+%% the compression ratio, but we do not allow blocked streams
+%% by default because they could make latency worse rather
+%% than better.
+init_encode_state(Opts) ->
+ MaxTableCapacity = maps:get(max_encode_table_size, Opts, 4096),
+ MaxBlockedStreams = maps:get(max_encode_blocked_streams, Opts, 0),
+ cow_qpack:init(encoder, MaxTableCapacity, MaxBlockedStreams).
+
+-spec init_unidi_local_streams(cow_http3:stream_id(), cow_http3:stream_id(),
+ cow_http3:stream_id(), State) -> State when State::http3_machine().
+
+init_unidi_local_streams(ControlID, EncoderID, DecoderID,
+ State=#http3_machine{streams=Streams}) ->
+ State#http3_machine{
+ streams=Streams#{
+ ControlID => #unidi_stream{id=ControlID, dir=unidi_local, type=control},
+ EncoderID => #unidi_stream{id=EncoderID, dir=unidi_local, type=encoder},
+ DecoderID => #unidi_stream{id=DecoderID, dir=unidi_local, type=decoder}
+ }}.
+
+-spec init_unidi_stream(cow_http3:stream_id(), unidi_stream_dir(), State)
+ -> State when State::http3_machine().
+
+init_unidi_stream(StreamID, StreamDir, State=#http3_machine{streams=Streams}) ->
+ State#http3_machine{streams=Streams#{StreamID => #unidi_stream{
+ id=StreamID, dir=StreamDir, type=undefined}}}.
+
+-spec set_unidi_remote_stream_type(cow_http3:stream_id(), unidi_stream_type(), State)
+ -> {ok, State}
+ | {error, {connection_error, h3_stream_creation_error, atom()}, State}
+ when State::http3_machine().
+
+set_unidi_remote_stream_type(StreamID, Type=control,
+ State=#http3_machine{peer_control_state=no_stream}) ->
+ Stream = stream_get(StreamID, State),
+ {ok, stream_store(Stream#unidi_stream{type=Type},
+ State#http3_machine{peer_control_state=no_settings})};
+set_unidi_remote_stream_type(_, control, State) ->
+ {error, {connection_error, h3_stream_creation_error,
+ 'A peer cannot open two control streams. (RFC9114 6.2.1)'},
+ State};
+set_unidi_remote_stream_type(StreamID, Type=decoder,
+ State=#http3_machine{peer_decode_state=no_stream}) ->
+ Stream = stream_get(StreamID, State),
+ {ok, stream_store(Stream#unidi_stream{type=Type},
+ State#http3_machine{peer_decode_state=ready})};
+set_unidi_remote_stream_type(StreamID, Type=encoder,
+ State=#http3_machine{peer_encode_state=no_stream}) ->
+ Stream = stream_get(StreamID, State),
+ {ok, stream_store(Stream#unidi_stream{type=Type},
+ State#http3_machine{peer_encode_state=ready})};
+set_unidi_remote_stream_type(_, decoder, State) ->
+ {error, {connection_error, h3_stream_creation_error,
+ 'A peer cannot open two decoder streams. (RFC9204 4.2)'},
+ State};
+set_unidi_remote_stream_type(_, encoder, State) ->
+ {error, {connection_error, h3_stream_creation_error,
+ 'A peer cannot open two encoder streams. (RFC9204 4.2)'},
+ State}.
+
+%% All bidi streams are request/response.
+%% We only need to know the method when in client mode.
+
+-spec init_bidi_stream(cow_http3:stream_id(), State)
+ -> State when State::http3_machine().
+
+init_bidi_stream(StreamID, State=#http3_machine{streams=Streams}) ->
+ State#http3_machine{streams=Streams#{
+ StreamID => #bidi_stream{id=StreamID}
+ }}.
+
+-spec init_bidi_stream(cow_http3:stream_id(), binary(), State)
+ -> State when State::http3_machine().
+
+init_bidi_stream(StreamID, Method, State=#http3_machine{streams=Streams}) ->
+ State#http3_machine{streams=Streams#{
+ StreamID => #bidi_stream{id=StreamID, method=Method}
+ }}.
+
+-spec close_bidi_stream_for_sending(cow_http3:stream_id(), State)
+ -> State when State::http3_machine().
+
+close_bidi_stream_for_sending(StreamID, State=#http3_machine{streams=Streams}) ->
+ #{StreamID := Stream} = Streams,
+ stream_store(Stream#bidi_stream{local=fin}, State).
+
+-spec close_stream(cow_http3:stream_id(), State)
+ -> {ok, State}
+ | {error, {connection_error, h3_closed_critical_stream, atom()}, State}
+ when State::http3_machine().
+
+close_stream(StreamID, State=#http3_machine{streams=Streams0}) ->
+ case maps:take(StreamID, Streams0) of
+ {#unidi_stream{type=control}, Streams} ->
+ {error, {connection_error, h3_closed_critical_stream,
+ 'A control stream was closed. (RFC9114 6.2.1)'},
+ State#http3_machine{streams=Streams}};
+ {#unidi_stream{type=decoder}, Streams} ->
+ {error, {connection_error, h3_closed_critical_stream,
+ 'A decoder stream was closed. (RFC9204 4.2)'},
+ State#http3_machine{streams=Streams}};
+ {#unidi_stream{type=encoder}, Streams} ->
+ {error, {connection_error, h3_closed_critical_stream,
+ 'An encoder stream was closed. (RFC9204 4.2)'},
+ State#http3_machine{streams=Streams}};
+ {_, Streams} ->
+ {ok, State#http3_machine{streams=Streams}}
+ end.
+
+-spec unidi_data(binary(), cow_http:fin(), cow_http3:stream_id(), State)
+ -> {ok, instructions(), State}
+ | {error, {connection_error, cow_qpack:error(), atom()}, State}
+ when State::http3_machine().
+
+%% All currently supported unidi streams are critical.
+unidi_data(_, fin, _, State) ->
+ {error, {connection_error, h3_closed_critical_stream,
+ 'The FIN flag was set on an encoder or decoder stream. (RFC9204 4.2)'},
+ State};
+unidi_data(Data, nofin, StreamID, State=#http3_machine{
+ decode_state=DecState0, encode_state=EncState0}) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=decoder} ->
+ case cow_qpack:execute_decoder_instructions(Data, EncState0) of
+ {ok, EncState} ->
+ {ok, undefined, State#http3_machine{encode_state=EncState}};
+ Error = {connection_error, _, _} ->
+ {error, Error, State}
+ end;
+ #unidi_stream{type=encoder} ->
+ case cow_qpack:execute_encoder_instructions(Data, DecState0) of
+ {ok, <<>>, DecState} ->
+ {ok, undefined, State#http3_machine{decode_state=DecState}};
+ {ok, DecData, DecState} ->
+ {ok, {decoder_instructions, DecData},
+ State#http3_machine{decode_state=DecState}};
+ Error = {connection_error, _, _} ->
+ {error, Error, State}
+ end
+ end.
+
+-spec frame(cow_http3:frame(), cow_http:fin(), cow_http3:stream_id(), State)
+ -> {ok, State}
+ | {ok, {data, binary()}, State}
+ | {ok, {headers, cow_http:headers(), cow_http:pseudo_headers(),
+ non_neg_integer() | undefined}, instructions(), State}
+ | {ok, {trailers, cow_http:headers()}, instructions(), State}
+ | {ok, {goaway, cow_http3:stream_id() | cow_http3:push_id()}, State}
+ | {error, {stream_error, h3_message_error, atom()}, instructions(), State}
+ | {error, {connection_error, cow_http3:error() | cow_qpack:error(), atom()}, State}
+ when State::http3_machine().
+
+frame(Frame, IsFin, StreamID, State) ->
+ case element(1, Frame) of
+ data -> data_frame(Frame, IsFin, StreamID, State);
+ headers -> headers_frame(Frame, IsFin, StreamID, State);
+ cancel_push -> cancel_push_frame(Frame, IsFin, StreamID, State);
+ settings -> settings_frame(Frame, IsFin, StreamID, State);
+ push_promise -> push_promise_frame(Frame, IsFin, StreamID, State);
+ goaway -> goaway_frame(Frame, IsFin, StreamID, State);
+ max_push_id -> max_push_id_frame(Frame, IsFin, StreamID, State)
+ end.
+
+%% DATA frame.
+
+data_frame(Frame={data, Data}, IsFin, StreamID, State) ->
+ DataLen = byte_size(Data),
+ case stream_get(StreamID, State) of
+ Stream = #bidi_stream{remote=nofin} ->
+ data_frame(Frame, IsFin, Stream, State, DataLen);
+ #bidi_stream{remote=idle} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'DATA frame received before a HEADERS frame. (RFC9114 4.1)'},
+ State};
+ #bidi_stream{remote=fin} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'DATA frame received after trailer HEADERS frame. (RFC9114 4.1)'},
+ State};
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State)
+ end.
+
+data_frame(Frame, IsFin, Stream0=#bidi_stream{remote_read_size=StreamRead}, State0, DataLen) ->
+ Stream = Stream0#bidi_stream{remote=IsFin,
+ remote_read_size=StreamRead + DataLen},
+ State = stream_store(Stream, State0),
+ case is_body_size_valid(Stream) of
+ true ->
+ {ok, Frame, State}%;
+%% @todo Implement and update error type/message.
+% false ->
+% stream_reset(StreamID, State, protocol_error,
+% 'The total size of DATA frames is different than the content-length. (RFC7540 8.1.2.6)')
+ end.
+
+%% It's always valid when no content-length header was specified.
+is_body_size_valid(#bidi_stream{remote_expected_size=undefined}) ->
+ true;
+%% We didn't finish reading the body but the size is already larger than expected.
+is_body_size_valid(#bidi_stream{remote=nofin, remote_expected_size=Expected,
+ remote_read_size=Read}) when Read > Expected ->
+ false;
+is_body_size_valid(#bidi_stream{remote=nofin}) ->
+ true;
+is_body_size_valid(#bidi_stream{remote=fin, remote_expected_size=Expected,
+ remote_read_size=Expected}) ->
+ true;
+%% We finished reading the body and the size read is not the one expected.
+is_body_size_valid(_) ->
+ false.
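+
+%% For example, a stream that expected a 10-byte body but got the
+%% fin flag after reading only 4 bytes is invalid:
+%%   false = is_body_size_valid(#bidi_stream{remote=fin,
+%%       remote_expected_size=10, remote_read_size=4}).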
+
+%% HEADERS frame.
+
+headers_frame(Frame, IsFin, StreamID, State=#http3_machine{mode=Mode}) ->
+ case stream_get(StreamID, State) of
+ %% Headers.
+ Stream=#bidi_stream{remote=idle} ->
+ headers_decode(Frame, IsFin, Stream, State, case Mode of
+ server -> request;
+ client -> response
+ end);
+ %% Trailers.
+ Stream=#bidi_stream{remote=nofin} ->
+ headers_decode(Frame, IsFin, Stream, State, trailers);
+ %% Additional frame received after trailers.
+ #bidi_stream{remote=fin} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'HEADERS frame received after trailer HEADERS frame. (RFC9114 4.1)'},
+ State};
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State)
+ end.
+
+headers_decode({headers, EncodedFieldSection}, IsFin, Stream=#bidi_stream{id=StreamID},
+ State=#http3_machine{decode_state=DecodeState0}, Type) ->
+ try cow_qpack:decode_field_section(EncodedFieldSection, StreamID, DecodeState0) of
+ {ok, Headers, DecData, DecodeState} ->
+ headers_process(Stream,
+ State#http3_machine{decode_state=DecodeState}, IsFin, Type, DecData, Headers);
+ Error = {connection_error, _, _} ->
+ {error, Error, State}
+ catch _:_ ->
+ {error, {connection_error, qpack_decompression_failed,
+ 'Exception while trying to decode QPACK-encoded header block. (RFC9204 2.2)'},
+ State}
+ end.
+
+headers_process(Stream=#bidi_stream{method=ReqMethod},
+ State=#http3_machine{local_settings=LocalSettings},
+ IsFin, Type, DecData, Headers0) ->
+ case cow_http:process_headers(Headers0, Type, ReqMethod, IsFin, LocalSettings) of
+ {headers, Headers, PseudoHeaders, Len} ->
+ headers_frame(Stream, State, IsFin, Type, DecData, Headers, PseudoHeaders, Len);
+% {push_promise, Headers, PseudoHeaders} -> %% @todo Implement push promises.
+ {trailers, Headers} ->
+ trailers_frame(Stream, State, DecData, Headers);
+ {error, Reason} ->
+ {error, {stream_error, h3_message_error, format_error(Reason)},
+			%% We decoded the headers so we must send the decoder instructions, if any.
+ case DecData of
+ <<>> -> undefined;
+ _ -> {decoder_instructions, DecData}
+ end,
+ State}
+ end.
+
+headers_frame(Stream0, State0, IsFin, Type, DecData, Headers, PseudoHeaders, Len) ->
+ Stream = case Type of
+ request ->
+ TE = case lists:keyfind(<<"te">>, 1, Headers) of
+ {_, TE0} -> TE0;
+ false -> undefined
+ end,
+ Stream0#bidi_stream{method=maps:get(method, PseudoHeaders),
+ remote=IsFin, remote_expected_size=Len, te=TE};
+ response ->
+ case PseudoHeaders of
+ #{status := Status} when Status >= 100, Status =< 199 -> Stream0;
+ _ -> Stream0#bidi_stream{remote=IsFin, remote_expected_size=Len}
+ end
+ end,
+ State = stream_store(Stream, State0),
+ {ok, {headers, Headers, PseudoHeaders, Len},
+ case DecData of
+ <<>> -> undefined;
+ _ -> {decoder_instructions, DecData}
+ end,
+ State}.
+
+trailers_frame(Stream0, State0, DecData, Headers) ->
+ Stream = Stream0#bidi_stream{remote=fin},
+ State = stream_store(Stream, State0),
+ %% @todo Error out if we didn't get the full body.
+ case is_body_size_valid(Stream) of
+ true ->
+ {ok, {trailers, Headers},
+ case DecData of
+ <<>> -> undefined;
+ _ -> {decoder_instructions, DecData}
+ end,
+ State}%;
+%% @todo Implement and update error type/message.
+% false ->
+% stream_reset(StreamID, State, protocol_error,
+% 'The total size of DATA frames is different than the content-length. (RFC7540 8.1.2.6)')
+ end.
+
+format_error(connect_invalid_pseudo_header) ->
+ 'CONNECT requests only use the :method and :authority pseudo-headers. (RFC9114 4.4)';
+format_error(connect_missing_authority) ->
+ 'CONNECT requests must include the :authority pseudo-header. (RFC9114 4.4)';
+format_error(empty_header_name) ->
+ 'Empty header names are not valid regular headers. (CVE-2019-9516)';
+format_error(extended_connect_missing_protocol) ->
+ 'Extended CONNECT requests must include the :protocol pseudo-header. (RFC9220, RFC8441 4)';
+format_error(invalid_connection_header) ->
+ 'The connection header is not allowed. (RFC9114 4.2)';
+format_error(invalid_keep_alive_header) ->
+ 'The keep-alive header is not allowed. (RFC9114 4.2)';
+format_error(invalid_protocol_pseudo_header) ->
+ 'The :protocol pseudo-header is only defined for the extended CONNECT. (RFC9220, RFC8441 4)';
+format_error(invalid_proxy_authenticate_header) ->
+ 'The proxy-authenticate header is not allowed. (RFC9114 4.2)';
+format_error(invalid_proxy_authorization_header) ->
+ 'The proxy-authorization header is not allowed. (RFC9114 4.2)';
+format_error(invalid_pseudo_header) ->
+ 'An unknown or invalid pseudo-header was found. (RFC9114 4.3)';
+format_error(invalid_status_pseudo_header) ->
+ 'The :status pseudo-header value is invalid. (RFC9114 4.3, RFC9114 4.3.2)';
+format_error(invalid_te_header) ->
+ 'The te header is only allowed in request headers. (RFC9114 4.2)';
+format_error(invalid_te_value) ->
+ 'The te header with a value other than "trailers" is not allowed. (RFC9114 4.2)';
+format_error(invalid_transfer_encoding_header) ->
+ 'The transfer-encoding header is not allowed. (RFC9114 4.1)';
+format_error(invalid_upgrade_header) ->
+ 'The upgrade header is not allowed. (RFC9114 4.2)';
+format_error(missing_pseudo_header) ->
+ 'A required pseudo-header was not found. (RFC9114 4.3.1, RFC9114 4.3.2)';
+format_error(multiple_authority_pseudo_headers) ->
+ 'Multiple :authority pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_method_pseudo_headers) ->
+ 'Multiple :method pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_path_pseudo_headers) ->
+ 'Multiple :path pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_protocol_pseudo_headers) ->
+ 'Multiple :protocol pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_scheme_pseudo_headers) ->
+ 'Multiple :scheme pseudo-headers were found. (RFC9114 4.3.1)';
+format_error(multiple_status_pseudo_headers) ->
+ 'Multiple :status pseudo-headers were found. (RFC9114 4.3.2)';
+format_error(non_zero_length_with_fin_flag) ->
+ 'HEADERS frame with the FIN flag contains a non-zero content-length. (RFC9114 4.1.2)';
+format_error(pseudo_header_after_regular) ->
+ 'Pseudo-headers were found after regular headers. (RFC9114 4.3)';
+format_error(trailer_invalid_pseudo_header) ->
+ 'Trailer header blocks must not contain pseudo-headers. (RFC9114 4.3)';
+format_error(uppercase_header_name) ->
+ 'Header names must be lowercase. (RFC9114 4.1.2, RFC9114 4.2)';
+format_error(Reason) ->
+ cow_http:format_semantic_error(Reason).
+
+cancel_push_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State)
+ end.
+
+settings_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State);
+ #bidi_stream{} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'The SETTINGS frame is not allowed on a bidi stream. (RFC9114 7.2.4)'},
+ State}
+ end.
+
+push_promise_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State)
+ end.
+
+goaway_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State);
+ #bidi_stream{} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'The GOAWAY frame is not allowed on a bidi stream. (RFC9114 7.2.6)'},
+ State}
+ end.
+
+max_push_id_frame(Frame, _IsFin, StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(Frame, State);
+ #bidi_stream{} ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'The MAX_PUSH_ID frame is not allowed on a bidi stream. (RFC9114 7.2.7)'},
+ State}
+ end.
+
+control_frame({settings, Settings}, State=#http3_machine{
+ peer_control_state=no_settings, encode_state=EncState0}) ->
+ %% @todo max_field_section_size
+ %% Send the QPACK values to the encoder.
+ MaxTableCapacity = maps:get(qpack_max_table_capacity, Settings, 0),
+ MaxBlockedStreams = maps:get(qpack_blocked_streams, Settings, 0),
+ EncState = cow_qpack:encoder_set_settings(MaxTableCapacity, MaxBlockedStreams, EncState0),
+ {ok, State#http3_machine{peer_control_state=ready, encode_state=EncState}};
+control_frame({settings, _}, State) ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'The SETTINGS frame cannot be sent more than once. (RFC9114 7.2.4)'},
+ State};
+control_frame(_Frame, State=#http3_machine{peer_control_state=no_settings}) ->
+ {error, {connection_error, h3_missing_settings,
+ 'The first frame on the control stream must be a SETTINGS frame. (RFC9114 6.2.1)'},
+ State};
+control_frame(Frame = {goaway, _}, State) ->
+ {ok, Frame, State};
+%% @todo Implement server push.
+control_frame({max_push_id, PushID}, State=#http3_machine{max_push_id=MaxPushID}) ->
+ if
+ PushID >= MaxPushID ->
+ {ok, State#http3_machine{max_push_id=PushID}};
+ true ->
+ {error, {connection_error, h3_id_error,
+ 'MAX_PUSH_ID must not be lower than previously received. (RFC9114 7.2.7)'},
+ State}
+ end;
+control_frame(ignored_frame, State) ->
+ {ok, State};
+control_frame(_Frame, State) ->
+ {error, {connection_error, h3_frame_unexpected,
+ 'DATA and HEADERS frames are not allowed on the control stream. (RFC9114 7.2.1, RFC9114 7.2.2)'},
+ State}.
+
+%% Ignored frames.
+
+-spec ignored_frame(cow_http3:stream_id(), State)
+ -> {ok, State}
+ | {error, {connection_error, cow_http3:error(), atom()}, State}
+ when State::http3_machine().
+
+ignored_frame(StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #unidi_stream{type=control} ->
+ control_frame(ignored_frame, State);
+ _ ->
+ {ok, State}
+ end.
+
+%% Functions for sending a message header or body. Note that
+%% this module does not send data directly; instead it returns
+%% a value that can then be used to send the frames.
+
+-spec prepare_headers(cow_http3:stream_id(), State,
+ idle | cow_http:fin(), cow_http:pseudo_headers(), cow_http:headers())
+ -> {ok, cow_http:fin(), iodata(), instructions(), State} when State::http3_machine().
+
+prepare_headers(StreamID, State=#http3_machine{encode_state=EncodeState0},
+ IsFin0, PseudoHeaders, Headers0) ->
+ Stream = #bidi_stream{method=Method, local=idle} = stream_get(StreamID, State),
+ IsFin = case {IsFin0, Method} of
+ {idle, _} -> nofin;
+ {_, <<"HEAD">>} -> fin;
+ _ -> IsFin0
+ end,
+ %% With QUIC we don't have a data queue so the local state
+ %% can be updated immediately.
+ LocalIsFin = case IsFin0 of
+ idle -> idle;
+ _ -> IsFin
+ end,
+ Headers = cow_http:merge_pseudo_headers(PseudoHeaders,
+ cow_http:remove_http1_headers(Headers0)),
+ {ok, HeaderBlock, EncData, EncodeState}
+ = cow_qpack:encode_field_section(Headers, StreamID, EncodeState0),
+ {ok, IsFin, HeaderBlock,
+ case EncData of
+ [] -> undefined;
+ _ -> {encoder_instructions, EncData}
+ end,
+ stream_store(Stream#bidi_stream{local=LocalIsFin},
+ State#http3_machine{encode_state=EncodeState})}.
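+
+%% Illustrative usage (caller side; names are hypothetical):
+%%
+%%   {ok, IsFin, HeaderBlock, Instrs, Machine} = prepare_headers(
+%%       StreamID, Machine0, nofin, #{status => 200}, Headers),
+%%
+%% The caller sends HeaderBlock in a HEADERS frame on StreamID and,
+%% when Instrs is {encoder_instructions, EncData}, sends EncData on
+%% the QPACK encoder stream.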
+
+-spec prepare_trailers(cow_http3:stream_id(), State, cow_http:headers())
+ -> {trailers, iodata(), instructions(), State}
+ | {no_trailers, State}
+ when State::http3_machine().
+
+prepare_trailers(StreamID, State=#http3_machine{encode_state=EncodeState0}, Trailers) ->
+ Stream = #bidi_stream{local=nofin, te=TE0} = stream_get(StreamID, State),
+ TE = try cow_http_hd:parse_te(TE0) of
+ {trailers, []} -> trailers;
+ _ -> no_trailers
+ catch _:_ ->
+ %% If we can't parse the TE header, assume we can't send trailers.
+ no_trailers
+ end,
+ case TE of
+ trailers ->
+ {ok, HeaderBlock, EncData, EncodeState}
+ = cow_qpack:encode_field_section(Trailers, StreamID, EncodeState0),
+ {trailers, HeaderBlock,
+ case EncData of
+ [] -> undefined;
+ _ -> {encoder_instructions, EncData}
+ end,
+ stream_store(Stream#bidi_stream{local=fin},
+ State#http3_machine{encode_state=EncodeState})};
+ no_trailers ->
+ {no_trailers, stream_store(Stream#bidi_stream{local=fin}, State)}
+ end.
+
+%% Public interface to reset streams.
+
+-spec reset_stream(cow_http3:stream_id(), State)
+ -> {ok, State} | {error, not_found} when State::http3_machine().
+
+reset_stream(StreamID, State=#http3_machine{streams=Streams0}) ->
+ case maps:take(StreamID, Streams0) of
+ {_, Streams} ->
+ {ok, State#http3_machine{streams=Streams}};
+ error ->
+ {error, not_found}
+ end.
+
+%% Retrieve the local state for a bidi stream.
+
+-spec get_bidi_stream_local_state(cow_http3:stream_id(), http3_machine())
+ -> {ok, idle | cow_http:fin()} | {error, not_found}.
+
+get_bidi_stream_local_state(StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #bidi_stream{local=IsFin} ->
+ {ok, IsFin};
+ %% Stream may never have been opened, or could have
+ %% already been closed.
+ undefined ->
+ {error, not_found}
+ end.
+
+%% Retrieve the remote state for a bidi stream.
+
+-spec get_bidi_stream_remote_state(cow_http3:stream_id(), http3_machine())
+ -> {ok, idle | cow_http:fin()} | {error, not_found}.
+
+get_bidi_stream_remote_state(StreamID, State) ->
+ case stream_get(StreamID, State) of
+ #bidi_stream{remote=IsFin} ->
+ {ok, IsFin};
+ %% Stream may never have been opened, or could have
+ %% already been closed.
+ undefined ->
+ {error, not_found}
+ end.
+
+%% Stream-related functions.
+
+stream_get(StreamID, #http3_machine{streams=Streams}) ->
+ maps:get(StreamID, Streams, undefined).
+
+stream_store(Stream, State=#http3_machine{streams=Streams}) ->
+ StreamID = case Stream of
+ #bidi_stream{id=StreamID0} -> StreamID0;
+ #unidi_stream{id=StreamID0} -> StreamID0
+ end,
+ State#http3_machine{streams=Streams#{StreamID => Stream}}.
diff --git a/src/cow_http_hd.erl b/src/cow_http_hd.erl
index e2a0a1d..f0e4fba 100644
--- a/src/cow_http_hd.erl
+++ b/src/cow_http_hd.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2014-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2014-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
@@ -1169,7 +1169,8 @@ cache_directive(<< $=, $", R/bits >>, Acc, T)
cache_directive_fields_list(R, Acc, T, []);
cache_directive(<< $=, C, R/bits >>, Acc, T)
when ?IS_DIGIT(C), (T =:= <<"max-age">>) or (T =:= <<"max-stale">>)
- or (T =:= <<"min-fresh">>) or (T =:= <<"s-maxage">>) ->
+ or (T =:= <<"min-fresh">>) or (T =:= <<"s-maxage">>)
+ or (T =:= <<"stale-while-revalidate">>) or (T =:= <<"stale-if-error">>) ->
cache_directive_delta(R, Acc, T, (C - $0));
cache_directive(<< $=, $", R/bits >>, Acc, T) -> cache_directive_quoted_string(R, Acc, T, <<>>);
cache_directive(<< $=, C, R/bits >>, Acc, T) when ?IS_TOKEN(C) -> cache_directive_token(R, Acc, T, << C >>);
@@ -1211,14 +1212,18 @@ cache_directive_unreserved_token() ->
?SUCHTHAT(T,
token(),
T =/= <<"max-age">> andalso T =/= <<"max-stale">> andalso T =/= <<"min-fresh">>
- andalso T =/= <<"s-maxage">> andalso T =/= <<"no-cache">> andalso T =/= <<"private">>).
+ andalso T =/= <<"s-maxage">> andalso T =/= <<"no-cache">> andalso T =/= <<"private">>
+ andalso T =/= <<"stale-while-revalidate">> andalso T =/= <<"stale-if-error">>).
cache_directive() ->
oneof([
token(),
{cache_directive_unreserved_token(), token()},
{cache_directive_unreserved_token(), quoted_string()},
- {elements([<<"max-age">>, <<"max-stale">>, <<"min-fresh">>, <<"s-maxage">>]), non_neg_integer()},
+ {elements([
+ <<"max-age">>, <<"max-stale">>, <<"min-fresh">>, <<"s-maxage">>,
+ <<"stale-while-revalidate">>, <<"stale-if-error">>
+ ]), non_neg_integer()},
{fields, elements([<<"no-cache">>, <<"private">>]), small_list(token())}
]).
@@ -1260,7 +1265,13 @@ parse_cache_control_test_() ->
{<<"max-age=30">>, [{<<"max-age">>, 30}]},
{<<"private, community=\"UCI\"">>, [<<"private">>, {<<"community">>, <<"UCI">>}]},
{<<"private=\"Content-Type, Content-Encoding, Content-Language\"">>,
- [{<<"private">>, [<<"content-type">>, <<"content-encoding">>, <<"content-language">>]}]}
+ [{<<"private">>, [<<"content-type">>, <<"content-encoding">>, <<"content-language">>]}]},
+ %% RFC5861 3.1.
+ {<<"max-age=600, stale-while-revalidate=30">>,
+ [{<<"max-age">>, 600}, {<<"stale-while-revalidate">>, 30}]},
+ %% RFC5861 4.1.
+ {<<"max-age=600, stale-if-error=1200">>,
+ [{<<"max-age">>, 600}, {<<"stale-if-error">>, 1200}]}
],
[{V, fun() -> R = parse_cache_control(V) end} || {V, R} <- Tests].
@@ -3227,11 +3238,11 @@ parse_upgrade_error_test_() ->
parse_variant_key(VariantKey, NumMembers) ->
List = cow_http_struct_hd:parse_list(VariantKey),
[case Inner of
- {with_params, InnerList, #{}} ->
+ {list, InnerList, []} ->
NumMembers = length(InnerList),
[case Item of
- {with_params, {token, Value}, #{}} -> Value;
- {with_params, {string, Value}, #{}} -> Value
+ {item, {token, Value}, []} -> Value;
+ {item, {string, Value}, []} -> Value
end || Item <- InnerList]
end || Inner <- List].
@@ -3261,9 +3272,9 @@ parse_variant_key_error_test_() ->
%% We assume that the lists are of correct length.
variant_key(VariantKeys) ->
cow_http_struct_hd:list([
- {with_params, [
- {with_params, {string, Value}, #{}}
- || Value <- InnerList], #{}}
+ {list, [
+ {item, {string, Value}, []}
+ || Value <- InnerList], []}
|| InnerList <- VariantKeys]).
-ifdef(TEST).
@@ -3287,14 +3298,14 @@ variant_key_identity_test_() ->
-spec parse_variants(binary()) -> [{binary(), [binary()]}].
parse_variants(Variants) ->
- {Dict0, Order} = cow_http_struct_hd:parse_dictionary(Variants),
- Dict = maps:map(fun(_, {with_params, List, #{}}) ->
- [case Item of
- {with_params, {token, Value}, #{}} -> Value;
- {with_params, {string, Value}, #{}} -> Value
- end || Item <- List]
- end, Dict0),
- [{Key, maps:get(Key, Dict)} || Key <- Order].
+ Dict = cow_http_struct_hd:parse_dictionary(Variants),
+ [case DictItem of
+ {Key, {list, List, []}} ->
+ {Key, [case Item of
+ {item, {token, Value}, []} -> Value;
+ {item, {string, Value}, []} -> Value
+ end || Item <- List]}
+ end || DictItem <- Dict].
-ifdef(TEST).
parse_variants_test_() ->
@@ -3317,9 +3328,9 @@ parse_variants_test_() ->
-spec variants([{binary(), [binary()]}]) -> iolist().
variants(Variants) ->
cow_http_struct_hd:dictionary([
- {Key, {with_params, [
- {with_params, {string, Value}, #{}}
- || Value <- List], #{}}}
+ {Key, {list, [
+ {item, {string, Value}, []}
+ || Value <- List], []}}
|| {Key, List} <- Variants]).
-ifdef(TEST).
@@ -3383,29 +3394,19 @@ www_auth_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> www_auth_list(R, Acc
www_auth_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) ->
?LOWER(www_auth_scheme, R, Acc, <<>>).
-www_auth_basic_before_realm(<< C, R/bits >>, Acc) when ?IS_WS(C) -> www_auth_basic_before_realm(R, Acc);
-www_auth_basic_before_realm(<< "realm=\"", R/bits >>, Acc) -> www_auth_basic(R, Acc, <<>>).
-
-www_auth_basic(<< $", R/bits >>, Acc, Realm) -> www_auth_list_sep(R, [{basic, Realm}|Acc]);
-www_auth_basic(<< $\\, C, R/bits >>, Acc, Realm) when ?IS_VCHAR_OBS(C) -> www_auth_basic(R, Acc, << Realm/binary, C >>);
-www_auth_basic(<< C, R/bits >>, Acc, Realm) when ?IS_VCHAR_OBS(C) -> www_auth_basic(R, Acc, << Realm/binary, C >>).
-
-www_auth_scheme(<< C, R/bits >>, Acc, Scheme) when ?IS_WS(C) ->
- case Scheme of
- <<"basic">> -> www_auth_basic_before_realm(R, Acc);
- <<"bearer">> -> www_auth_params_list(R, Acc, bearer, []);
- <<"digest">> -> www_auth_params_list(R, Acc, digest, []);
- _ -> www_auth_params_list(R, Acc, Scheme, [])
- end;
+www_auth_scheme(<< C, R/bits >>, Acc, Scheme0) when ?IS_WS(C) ->
+ Scheme = case Scheme0 of
+ <<"basic">> -> basic;
+ <<"bearer">> -> bearer;
+ <<"digest">> -> digest;
+ _ -> Scheme0
+ end,
+ www_auth_params_list(R, Acc, Scheme, []);
www_auth_scheme(<< C, R/bits >>, Acc, Scheme) when ?IS_TOKEN(C) ->
?LOWER(www_auth_scheme, R, Acc, Scheme).
-www_auth_list_sep(<<>>, Acc) -> lists:reverse(Acc);
-www_auth_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> www_auth_list_sep(R, Acc);
-www_auth_list_sep(<< $,, R/bits >>, Acc) -> www_auth_list(R, Acc).
-
www_auth_params_list(<<>>, Acc, Scheme, Params) ->
- lists:reverse([{Scheme, lists:reverse(nonempty(Params))}|Acc]);
+ lists:reverse([www_auth_tuple(Scheme, nonempty(Params))|Acc]);
www_auth_params_list(<< C, R/bits >>, Acc, Scheme, Params) when ?IS_WS_COMMA(C) ->
www_auth_params_list(R, Acc, Scheme, Params);
www_auth_params_list(<< "algorithm=", C, R/bits >>, Acc, Scheme, Params) when ?IS_TOKEN(C) ->
@@ -3442,7 +3443,7 @@ www_auth_param(<< $=, C, R/bits >>, Acc, Scheme, Params, K) when ?IS_TOKEN(C) ->
www_auth_param(<< C, R/bits >>, Acc, Scheme, Params, K) when ?IS_TOKEN(C) ->
?LOWER(www_auth_param, R, Acc, Scheme, Params, K);
www_auth_param(R, Acc, Scheme, Params, NewScheme) ->
- www_auth_scheme(R, [{Scheme, lists:reverse(Params)}|Acc], NewScheme).
+ www_auth_scheme(R, [www_auth_tuple(Scheme, Params)|Acc], NewScheme).
www_auth_token(<< C, R/bits >>, Acc, Scheme, Params, K, V) when ?IS_TOKEN(C) ->
www_auth_token(R, Acc, Scheme, Params, K, << V/binary, C >>);
@@ -3457,19 +3458,26 @@ www_auth_quoted(<< C, R/bits >>, Acc, Scheme, Params, K, V) when ?IS_VCHAR_OBS(C
www_auth_quoted(R, Acc, Scheme, Params, K, << V/binary, C >>).
www_auth_params_list_sep(<<>>, Acc, Scheme, Params) ->
- lists:reverse([{Scheme, lists:reverse(Params)}|Acc]);
+ lists:reverse([www_auth_tuple(Scheme, Params)|Acc]);
www_auth_params_list_sep(<< C, R/bits >>, Acc, Scheme, Params) when ?IS_WS(C) ->
www_auth_params_list_sep(R, Acc, Scheme, Params);
www_auth_params_list_sep(<< $,, R/bits >>, Acc, Scheme, Params) ->
www_auth_params_list_after_sep(R, Acc, Scheme, Params).
www_auth_params_list_after_sep(<<>>, Acc, Scheme, Params) ->
- lists:reverse([{Scheme, lists:reverse(Params)}|Acc]);
+ lists:reverse([www_auth_tuple(Scheme, Params)|Acc]);
www_auth_params_list_after_sep(<< C, R/bits >>, Acc, Scheme, Params) when ?IS_WS_COMMA(C) ->
www_auth_params_list_after_sep(R, Acc, Scheme, Params);
www_auth_params_list_after_sep(R, Acc, Scheme, Params) ->
www_auth_params_list(R, Acc, Scheme, Params).
+www_auth_tuple(basic, Params) ->
+ %% Unknown parameters MUST be ignored. (RFC7617 2)
+ {<<"realm">>, Realm} = lists:keyfind(<<"realm">>, 1, Params),
+ {basic, Realm};
+www_auth_tuple(Scheme, Params) ->
+ {Scheme, lists:reverse(Params)}.
+
-ifdef(TEST).
parse_www_authenticate_test_() ->
Tests = [
@@ -3496,6 +3504,18 @@ parse_www_authenticate_test_() ->
]}]},
{<<"Basic realm=\"WallyWorld\"">>,
[{basic, <<"WallyWorld">>}]},
+ %% RFC7617 2.1.
+ {<<"Basic realm=\"foo\", charset=\"UTF-8\"">>,
+ [{basic, <<"foo">>}]},
+ %% A real-world example.
+ {<<"Basic realm=\"https://123456789012.dkr.ecr.eu-north-1.amazonaws.com/\",service=\"ecr.amazonaws.com\"">>,
+ [{basic, <<"https://123456789012.dkr.ecr.eu-north-1.amazonaws.com/">>}]},
+ {<<"Bearer realm=\"example\", Basic realm=\"foo\", charset=\"UTF-8\"">>,
+ [{bearer, [{<<"realm">>, <<"example">>}]},
+ {basic, <<"foo">>}]},
+ {<<"Basic realm=\"foo\", foo=\"bar\", charset=\"UTF-8\", Bearer realm=\"example\",foo=\"bar\"">>,
+ [{basic, <<"foo">>},
+ {bearer, [{<<"realm">>, <<"example">>}, {<<"foo">>,<<"bar">>}]}]},
{<<"Digest realm=\"[email protected]\", qop=\"auth,auth-int\", "
"nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", "
"opaque=\"5ccc069c403ebaf9f0171e9517f40e41\"">>,
diff --git a/src/cow_http_struct_hd.erl b/src/cow_http_struct_hd.erl
index 373c8da..a79c691 100644
--- a/src/cow_http_struct_hd.erl
+++ b/src/cow_http_struct_hd.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2019, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2019-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
@@ -15,17 +15,18 @@
%% The mapping between Erlang and structured headers types is as follows:
%%
%% List: list()
-%% Dictionary: map()
+%% Inner list: {list, [item()], params()}
+%% Dictionary: [{binary(), item()}]
+%% There is no distinction between empty list and empty dictionary.
+%% Item with parameters: {item, bare_item(), params()}
+%% Parameters: [{binary(), bare_item()}]
%% Bare item: one bare_item() that can be of type:
%% Integer: integer()
-%% Float: float()
+%% Decimal: {decimal, {integer(), integer()}}
%% String: {string, binary()}
%% Token: {token, binary()}
%% Byte sequence: {binary, binary()}
%% Boolean: boolean()
-%% And finally:
-%% Type with Parameters: {with_params, Type, Parameters}
-%% Parameters: [{binary(), bare_item()}]
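+%%
+%% For example (illustrative), the dictionary "a=1, b;x" parses to:
+%%   [{<<"a">>, {item, 1, []}}, {<<"b">>, {item, true, [{<<"x">>, true}]}}]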
-module(cow_http_struct_hd).
@@ -39,13 +40,13 @@
-include("cow_parse.hrl").
-type sh_list() :: [sh_item() | sh_inner_list()].
--type sh_inner_list() :: sh_with_params([sh_item()]).
--type sh_params() :: #{binary() => sh_bare_item() | undefined}.
--type sh_dictionary() :: {#{binary() => sh_item() | sh_inner_list()}, [binary()]}.
--type sh_item() :: sh_with_params(sh_bare_item()).
--type sh_bare_item() :: integer() | float() | boolean()
+-type sh_inner_list() :: {list, [sh_item()], sh_params()}.
+-type sh_params() :: [{binary(), sh_bare_item()}].
+-type sh_dictionary() :: [{binary(), sh_item() | sh_inner_list()}].
+-type sh_item() :: {item, sh_bare_item(), sh_params()}.
+-type sh_bare_item() :: integer() | sh_decimal() | boolean()
| {string | token | binary, binary()}.
--type sh_with_params(Type) :: {with_params, Type, sh_params()}.
+-type sh_decimal() :: {decimal, {integer(), integer()}}.
-define(IS_LC_ALPHA(C),
(C =:= $a) or (C =:= $b) or (C =:= $c) or (C =:= $d) or (C =:= $e) or
@@ -60,35 +61,41 @@
-spec parse_dictionary(binary()) -> sh_dictionary().
parse_dictionary(<<>>) ->
- {#{}, []};
-parse_dictionary(<<C,R/bits>>) when ?IS_LC_ALPHA(C) ->
- {Dict, Order, <<>>} = parse_dict_key(R, #{}, [], <<C>>),
- {Dict, Order}.
+ [];
+parse_dictionary(<<C,R/bits>>) when ?IS_LC_ALPHA(C) or (C =:= $*) ->
+ parse_dict_key(R, [], <<C>>).
-parse_dict_key(<<$=,$(,R0/bits>>, Acc, Order, K) ->
- false = maps:is_key(K, Acc),
+parse_dict_key(<<$=,$(,R0/bits>>, Acc, K) ->
{Item, R} = parse_inner_list(R0, []),
- parse_dict_before_sep(R, Acc#{K => Item}, [K|Order]);
-parse_dict_key(<<$=,R0/bits>>, Acc, Order, K) ->
- false = maps:is_key(K, Acc),
+ parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, Item}));
+parse_dict_key(<<$=,R0/bits>>, Acc, K) ->
{Item, R} = parse_item1(R0),
- parse_dict_before_sep(R, Acc#{K => Item}, [K|Order]);
-parse_dict_key(<<C,R/bits>>, Acc, Order, K)
+ parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, Item}));
+parse_dict_key(<<C,R/bits>>, Acc, K)
when ?IS_LC_ALPHA(C) or ?IS_DIGIT(C)
- or (C =:= $_) or (C =:= $-) or (C =:= $*) ->
- parse_dict_key(R, Acc, Order, <<K/binary,C>>).
-
-parse_dict_before_sep(<<C,R/bits>>, Acc, Order) when ?IS_WS(C) ->
- parse_dict_before_sep(R, Acc, Order);
-parse_dict_before_sep(<<C,R/bits>>, Acc, Order) when C =:= $, ->
- parse_dict_before_member(R, Acc, Order);
-parse_dict_before_sep(<<>>, Acc, Order) ->
- {Acc, lists:reverse(Order), <<>>}.
-
-parse_dict_before_member(<<C,R/bits>>, Acc, Order) when ?IS_WS(C) ->
- parse_dict_before_member(R, Acc, Order);
-parse_dict_before_member(<<C,R/bits>>, Acc, Order) when ?IS_LC_ALPHA(C) ->
- parse_dict_key(R, Acc, Order, <<C>>).
+ or (C =:= $_) or (C =:= $-) or (C =:= $.) or (C =:= $*) ->
+ parse_dict_key(R, Acc, <<K/binary,C>>);
+parse_dict_key(<<$;,R0/bits>>, Acc, K) ->
+ {Params, R} = parse_before_param(R0, []),
+ parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, {item, true, Params}}));
+parse_dict_key(R, Acc, K) ->
+ parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, {item, true, []}})).
+
+parse_dict_before_sep(<<$\s,R/bits>>, Acc) ->
+ parse_dict_before_sep(R, Acc);
+parse_dict_before_sep(<<$\t,R/bits>>, Acc) ->
+ parse_dict_before_sep(R, Acc);
+parse_dict_before_sep(<<C,R/bits>>, Acc) when C =:= $, ->
+ parse_dict_before_member(R, Acc);
+parse_dict_before_sep(<<>>, Acc) ->
+ Acc.
+
+parse_dict_before_member(<<$\s,R/bits>>, Acc) ->
+ parse_dict_before_member(R, Acc);
+parse_dict_before_member(<<$\t,R/bits>>, Acc) ->
+ parse_dict_before_member(R, Acc);
+parse_dict_before_member(<<C,R/bits>>, Acc) when ?IS_LC_ALPHA(C) or (C =:= $*) ->
+ parse_dict_key(R, Acc, <<C>>).
-spec parse_item(binary()) -> sh_item().
parse_item(Bin) ->
@@ -98,10 +105,10 @@ parse_item(Bin) ->
parse_item1(Bin) ->
case parse_bare_item(Bin) of
{Item, <<$;,R/bits>>} ->
- {Params, Rest} = parse_before_param(R, #{}),
- {{with_params, Item, Params}, Rest};
+ {Params, Rest} = parse_before_param(R, []),
+ {{item, Item, Params}, Rest};
{Item, Rest} ->
- {{with_params, Item, #{}}, Rest}
+ {{item, Item, []}, Rest}
end.
-spec parse_list(binary()) -> sh_list().
@@ -117,86 +124,104 @@ parse_list_member(R0, Acc) ->
{Item, R} = parse_item1(R0),
parse_list_before_sep(R, [Item|Acc]).
-parse_list_before_sep(<<C,R/bits>>, Acc) when ?IS_WS(C) ->
+parse_list_before_sep(<<$\s,R/bits>>, Acc) ->
+ parse_list_before_sep(R, Acc);
+parse_list_before_sep(<<$\t,R/bits>>, Acc) ->
parse_list_before_sep(R, Acc);
parse_list_before_sep(<<$,,R/bits>>, Acc) ->
parse_list_before_member(R, Acc);
parse_list_before_sep(<<>>, Acc) ->
lists:reverse(Acc).
-parse_list_before_member(<<C,R/bits>>, Acc) when ?IS_WS(C) ->
+parse_list_before_member(<<$\s,R/bits>>, Acc) ->
+ parse_list_before_member(R, Acc);
+parse_list_before_member(<<$\t,R/bits>>, Acc) ->
parse_list_before_member(R, Acc);
parse_list_before_member(R, Acc) ->
parse_list_member(R, Acc).
%% Internal.
-parse_inner_list(<<C,R/bits>>, Acc) when ?IS_WS(C) ->
+parse_inner_list(<<$\s,R/bits>>, Acc) ->
parse_inner_list(R, Acc);
parse_inner_list(<<$),$;,R0/bits>>, Acc) ->
- {Params, R} = parse_before_param(R0, #{}),
- {{with_params, lists:reverse(Acc), Params}, R};
+ {Params, R} = parse_before_param(R0, []),
+ {{list, lists:reverse(Acc), Params}, R};
parse_inner_list(<<$),R/bits>>, Acc) ->
- {{with_params, lists:reverse(Acc), #{}}, R};
+ {{list, lists:reverse(Acc), []}, R};
parse_inner_list(R0, Acc) ->
{Item, R = <<C,_/bits>>} = parse_item1(R0),
true = (C =:= $\s) orelse (C =:= $)),
parse_inner_list(R, [Item|Acc]).
-parse_before_param(<<C,R/bits>>, Acc) when ?IS_WS(C) ->
+parse_before_param(<<$\s,R/bits>>, Acc) ->
parse_before_param(R, Acc);
-parse_before_param(<<C,R/bits>>, Acc) when ?IS_LC_ALPHA(C) ->
+parse_before_param(<<C,R/bits>>, Acc) when ?IS_LC_ALPHA(C) or (C =:= $*) ->
parse_param(R, Acc, <<C>>).
parse_param(<<$;,R/bits>>, Acc, K) ->
- parse_before_param(R, Acc#{K => undefined});
+ parse_before_param(R, lists:keystore(K, 1, Acc, {K, true}));
parse_param(<<$=,R0/bits>>, Acc, K) ->
case parse_bare_item(R0) of
{Item, <<$;,R/bits>>} ->
- false = maps:is_key(K, Acc),
- parse_before_param(R, Acc#{K => Item});
+ parse_before_param(R, lists:keystore(K, 1, Acc, {K, Item}));
{Item, R} ->
- false = maps:is_key(K, Acc),
- {Acc#{K => Item}, R}
+ {lists:keystore(K, 1, Acc, {K, Item}), R}
end;
parse_param(<<C,R/bits>>, Acc, K)
when ?IS_LC_ALPHA(C) or ?IS_DIGIT(C)
- or (C =:= $_) or (C =:= $-) or (C =:= $*) ->
+ or (C =:= $_) or (C =:= $-) or (C =:= $.) or (C =:= $*) ->
parse_param(R, Acc, <<K/binary,C>>);
parse_param(R, Acc, K) ->
- false = maps:is_key(K, Acc),
- {Acc#{K => undefined}, R}.
+ {lists:keystore(K, 1, Acc, {K, true}), R}.
-%% Integer or float.
+%% Integer or decimal.
parse_bare_item(<<$-,R/bits>>) -> parse_number(R, 0, <<$->>);
parse_bare_item(<<C,R/bits>>) when ?IS_DIGIT(C) -> parse_number(R, 1, <<C>>);
%% String.
parse_bare_item(<<$",R/bits>>) -> parse_string(R, <<>>);
%% Token.
-parse_bare_item(<<C,R/bits>>) when ?IS_ALPHA(C) -> parse_token(R, <<C>>);
+parse_bare_item(<<C,R/bits>>) when ?IS_ALPHA(C) or (C =:= $*) -> parse_token(R, <<C>>);
%% Byte sequence.
-parse_bare_item(<<$*,R/bits>>) -> parse_binary(R, <<>>);
+parse_bare_item(<<$:,R/bits>>) -> parse_binary(R, <<>>);
%% Boolean.
parse_bare_item(<<"?0",R/bits>>) -> {false, R};
parse_bare_item(<<"?1",R/bits>>) -> {true, R}.
parse_number(<<C,R/bits>>, L, Acc) when ?IS_DIGIT(C) ->
parse_number(R, L+1, <<Acc/binary,C>>);
-parse_number(<<C,R/bits>>, L, Acc) when C =:= $. ->
- parse_float(R, L, 0, <<Acc/binary,C>>);
+parse_number(<<$.,R/bits>>, L, Acc) ->
+ parse_decimal(R, L, 0, Acc, <<>>);
parse_number(R, L, Acc) when L =< 15 ->
{binary_to_integer(Acc), R}.
-parse_float(<<C,R/bits>>, L1, L2, Acc) when ?IS_DIGIT(C) ->
- parse_float(R, L1, L2+1, <<Acc/binary,C>>);
-parse_float(R, L1, L2, Acc) when
- L1 =< 9, L2 =< 6;
- L1 =< 10, L2 =< 5;
- L1 =< 11, L2 =< 4;
- L1 =< 12, L2 =< 3;
- L1 =< 13, L2 =< 2;
- L1 =< 14, L2 =< 1 ->
- {binary_to_float(Acc), R}.
+parse_decimal(<<C,R/bits>>, L1, L2, IntAcc, FracAcc) when ?IS_DIGIT(C) ->
+ parse_decimal(R, L1, L2+1, IntAcc, <<FracAcc/binary,C>>);
+parse_decimal(R, L1, L2, IntAcc, FracAcc0) when L1 =< 12, L2 >= 1, L2 =< 3 ->
+	%% While not strictly required, this gives a more consistent representation.
+ FracAcc = case FracAcc0 of
+ <<$0>> -> <<>>;
+ <<$0,$0>> -> <<>>;
+ <<$0,$0,$0>> -> <<>>;
+ <<A,B,$0>> -> <<A,B>>;
+ <<A,$0,$0>> -> <<A>>;
+ <<A,$0>> -> <<A>>;
+ _ -> FracAcc0
+ end,
+ Mul = case byte_size(FracAcc) of
+ 3 -> 1000;
+ 2 -> 100;
+ 1 -> 10;
+ 0 -> 1
+ end,
+ Int = binary_to_integer(IntAcc),
+ Frac = case FracAcc of
+ <<>> -> 0;
+ %% Mind the sign.
+ _ when Int < 0 -> -binary_to_integer(FracAcc);
+ _ -> binary_to_integer(FracAcc)
+ end,
+ {{decimal, {Int * Mul + Frac, -byte_size(FracAcc)}}, R}.
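+
+%% For example (illustrative): parsing "1.500" trims the fraction
+%% to <<"5">>, giving Mul=10, Int=1 and Frac=5, and returns
+%% {decimal, {15, -1}}, that is 15 * 10^-1.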
parse_string(<<$\\,$",R/bits>>, Acc) ->
parse_string(R, <<Acc/binary,$">>);
@@ -215,7 +240,7 @@ parse_token(<<C,R/bits>>, Acc) when ?IS_TOKEN(C) or (C =:= $:) or (C =:= $/) ->
parse_token(R, Acc) ->
{{token, Acc}, R}.
-parse_binary(<<$*,R/bits>>, Acc) ->
+parse_binary(<<$:,R/bits>>, Acc) ->
{{binary, base64:decode(Acc)}, R};
parse_binary(<<C,R/bits>>, Acc) when ?IS_ALPHANUM(C) or (C =:= $+) or (C =:= $/) or (C =:= $=) ->
parse_binary(R, <<Acc/binary,C>>).
@@ -231,10 +256,13 @@ parse_struct_hd_test_() ->
%% The implementation is strict. We fail whenever we can.
CanFail = maps:get(<<"can_fail">>, Test, false),
MustFail = maps:get(<<"must_fail">>, Test, false),
+ io:format("must fail ~p~nexpected json ~0p~n",
+ [MustFail, maps:get(<<"expected">>, Test, undefined)]),
Expected = case MustFail of
true -> undefined;
false -> expected_to_term(maps:get(<<"expected">>, Test))
end,
+ io:format("expected term: ~0p", [Expected]),
Raw = raw_to_binary(Raw0),
case HeaderType of
<<"dictionary">> when MustFail; CanFail ->
@@ -251,7 +279,7 @@ parse_struct_hd_test_() ->
<<"list">> when MustFail; CanFail ->
{'EXIT', _} = (catch parse_list(Raw));
<<"dictionary">> ->
- {Expected, _Order} = (catch parse_dictionary(Raw));
+ Expected = (catch parse_dictionary(Raw));
<<"item">> ->
Expected = (catch parse_item(Raw));
<<"list">> ->
@@ -265,26 +293,45 @@ parse_struct_hd_test_() ->
} <- Tests]
end || File <- Files]).
+%% The test JSON files use arrays for almost everything. Identifying
+%% what is what requires looking deeper into the values:
+%%
+%% dict: [["k", v], ["k2", v2]] (values may have params)
+%% params: [["k", v], ["k2", v2]] (no params for values)
+%% list: [e1, e2, e3]
+%% inner-list: [[ [items...], params]]
+%% item: [bare, params]
+
%% Item.
-expected_to_term(E=[_, Params]) when is_map(Params) ->
- e2t(E);
+expected_to_term([Bare, []])
+ when is_boolean(Bare); is_number(Bare); is_binary(Bare); is_map(Bare) ->
+ {item, e2tb(Bare), []};
+expected_to_term([Bare, Params = [[<<_/bits>>, _]|_]])
+ when is_boolean(Bare); is_number(Bare); is_binary(Bare); is_map(Bare) ->
+ {item, e2tb(Bare), e2tp(Params)};
+%% Empty list or dictionary.
+expected_to_term([]) ->
+ [];
+%% Dictionary.
+%%
+%% We exclude the empty list from values because it could
+%% be confused with an outer list of strings. There are
+%% currently no conflicts in the tests, thankfully.
+expected_to_term(Dict = [[<<_/bits>>, V]|_]) when V =/= [] ->
+ e2t(Dict);
%% Outer list.
-expected_to_term(Expected) when is_list(Expected) ->
- [e2t(E) || E <- Expected];
-expected_to_term(Expected) ->
- e2t(Expected).
+expected_to_term(List) when is_list(List) ->
+ [e2t(E) || E <- List].
%% Dictionary.
-e2t(Dict) when is_map(Dict) ->
- maps:map(fun(_, V) -> e2t(V) end, Dict);
+e2t(Dict = [[<<_/bits>>, _]|_]) ->
+ [{K, e2t(V)} || [K, V] <- Dict];
%% Inner list.
e2t([List, Params]) when is_list(List) ->
- {with_params, [e2t(E) || E <- List],
- maps:map(fun(_, P) -> e2tb(P) end, Params)};
+ {list, [e2t(E) || E <- List], e2tp(Params)};
%% Item.
e2t([Bare, Params]) ->
- {with_params, e2tb(Bare),
- maps:map(fun(_, P) -> e2tb(P) end, Params)}.
+ {item, e2tb(Bare), e2tp(Params)}.
%% Bare item.
e2tb(#{<<"__type">> := <<"token">>, <<"value">> := V}) ->
@@ -293,11 +340,18 @@ e2tb(#{<<"__type">> := <<"binary">>, <<"value">> := V}) ->
{binary, base32:decode(V)};
e2tb(V) when is_binary(V) ->
{string, V};
-e2tb(null) ->
- undefined;
+e2tb(V) when is_float(V) ->
+ %% There should be no rounding needed for the test cases.
+ {decimal, decimal:to_decimal(V, #{precision => 3, rounding => round_down})};
e2tb(V) ->
V.
+%% Params.
+e2tp([]) ->
+ [];
+e2tp(Params) ->
+ [{K, e2tb(V)} || [K, V] <- Params].
+
%% The Cowlib parsers currently do not support resuming parsing
%% in the case of multiple headers. To make tests work we modify
%% the raw value the same way Cowboy does when encountering
@@ -308,7 +362,7 @@ e2tb(V) ->
raw_to_binary(RawList) ->
trim_ws(iolist_to_binary(lists:join(<<", ">>, RawList))).
-trim_ws(<<C,R/bits>>) when ?IS_WS(C) -> trim_ws(R);
+trim_ws(<<$\s,R/bits>>) -> trim_ws(R);
trim_ws(R) -> trim_ws_end(R, byte_size(R) - 1).
trim_ws_end(_, -1) ->
@@ -316,7 +370,6 @@ trim_ws_end(_, -1) ->
trim_ws_end(Value, N) ->
case binary:at(Value, N) of
$\s -> trim_ws_end(Value, N - 1);
- $\t -> trim_ws_end(Value, N - 1);
_ ->
S = N + 1,
<< Value2:S/binary, _/bits >> = Value,
@@ -326,71 +379,118 @@ trim_ws_end(Value, N) ->
%% Building.
--spec dictionary(#{binary() => sh_item() | sh_inner_list()}
- | [{binary(), sh_item() | sh_inner_list()}])
+-spec dictionary(#{binary() => sh_item() | sh_inner_list()} | sh_dictionary())
-> iolist().
-%% @todo Also accept this? dictionary({Map, Order}) ->
dictionary(Map) when is_map(Map) ->
dictionary(maps:to_list(Map));
dictionary(KVList) when is_list(KVList) ->
lists:join(<<", ">>, [
- [Key, $=, item_or_inner_list(Value)]
+ case Value of
+ true -> Key;
+ _ -> [Key, $=, item_or_inner_list(Value)]
+ end
|| {Key, Value} <- KVList]).
-spec item(sh_item()) -> iolist().
-item({with_params, BareItem, Params}) ->
+item({item, BareItem, Params}) ->
[bare_item(BareItem), params(Params)].
-spec list(sh_list()) -> iolist().
list(List) ->
lists:join(<<", ">>, [item_or_inner_list(Value) || Value <- List]).
-item_or_inner_list(Value={with_params, List, _}) when is_list(List) ->
+item_or_inner_list(Value = {list, _, _}) ->
inner_list(Value);
item_or_inner_list(Value) ->
item(Value).
-inner_list({with_params, List, Params}) ->
+inner_list({list, List, Params}) ->
[$(, lists:join($\s, [item(Value) || Value <- List]), $), params(Params)].
bare_item({string, String}) ->
[$", escape_string(String, <<>>), $"];
+%% @todo Must fail if Token has invalid characters.
bare_item({token, Token}) ->
Token;
bare_item({binary, Binary}) ->
- [$*, base64:encode(Binary), $*];
+ [$:, base64:encode(Binary), $:];
+bare_item({decimal, {Base, Exp}}) when Exp >= 0 ->
+ Mul = case Exp of
+ 0 -> 1;
+ 1 -> 10;
+ 2 -> 100;
+ 3 -> 1000;
+ 4 -> 10000;
+ 5 -> 100000;
+ 6 -> 1000000;
+ 7 -> 10000000;
+ 8 -> 100000000;
+ 9 -> 1000000000;
+ 10 -> 10000000000;
+ 11 -> 100000000000;
+ 12 -> 1000000000000
+ end,
+ MaxLenWithSign = if
+ Base < 0 -> 13;
+ true -> 12
+ end,
+ Bin = integer_to_binary(Base * Mul),
+ true = byte_size(Bin) =< MaxLenWithSign,
+ [Bin, <<".0">>];
+bare_item({decimal, {Base, -1}}) ->
+ Int = Base div 10,
+ Frac = abs(Base) rem 10,
+ [integer_to_binary(Int), $., integer_to_binary(Frac)];
+bare_item({decimal, {Base, -2}}) ->
+ Int = Base div 100,
+ Frac = abs(Base) rem 100,
+ [integer_to_binary(Int), $., integer_to_binary(Frac)];
+bare_item({decimal, {Base, -3}}) ->
+ Int = Base div 1000,
+ Frac = abs(Base) rem 1000,
+ [integer_to_binary(Int), $., integer_to_binary(Frac)];
+bare_item({decimal, {Base, Exp}}) ->
+ Div = exp_div(Exp),
+ Int0 = Base div Div,
+ true = abs(Int0) < 1000000000000,
+ Frac0 = abs(Base) rem Div,
+ DivFrac = Div div 1000,
+ Frac1 = Frac0 div DivFrac,
+ {Int, Frac} = if
+ (Frac0 rem DivFrac) > (DivFrac div 2) ->
+ case Frac1 of
+ 999 when Int0 < 0 -> {Int0 - 1, 0};
+ 999 -> {Int0 + 1, 0};
+ _ -> {Int0, Frac1 + 1}
+ end;
+ true ->
+ {Int0, Frac1}
+ end,
+ [integer_to_binary(Int), $., if
+ Frac < 10 -> [$0, $0, integer_to_binary(Frac)];
+ Frac < 100 -> [$0, integer_to_binary(Frac)];
+ true -> integer_to_binary(Frac)
+ end];
bare_item(Integer) when is_integer(Integer) ->
integer_to_binary(Integer);
-%% In order to properly reproduce the float as a string we
-%% must first determine how many decimals we want in the
-%% fractional component, otherwise rounding errors may occur.
-bare_item(Float) when is_float(Float) ->
- Decimals = case trunc(Float) of
- I when I >= 10000000000000 -> 1;
- I when I >= 1000000000000 -> 2;
- I when I >= 100000000000 -> 3;
- I when I >= 10000000000 -> 4;
- I when I >= 1000000000 -> 5;
- _ -> 6
- end,
- float_to_binary(Float, [{decimals, Decimals}, compact]);
bare_item(true) ->
<<"?1">>;
bare_item(false) ->
<<"?0">>.
+exp_div(0) -> 1;
+exp_div(N) -> 10 * exp_div(N + 1).
+
escape_string(<<>>, Acc) -> Acc;
escape_string(<<$\\,R/bits>>, Acc) -> escape_string(R, <<Acc/binary,$\\,$\\>>);
escape_string(<<$",R/bits>>, Acc) -> escape_string(R, <<Acc/binary,$\\,$">>);
escape_string(<<C,R/bits>>, Acc) -> escape_string(R, <<Acc/binary,C>>).
params(Params) ->
- maps:fold(fun
- (Key, undefined, Acc) ->
- [[$;, Key]|Acc];
- (Key, Value, Acc) ->
- [[$;, Key, $=, bare_item(Value)]|Acc]
- end, [], Params).
+ [case Param of
+ {Key, true} -> [$;, Key];
+ {Key, Value} -> [$;, Key, $=, bare_item(Value)]
+ end || Param <- Params].
-ifdef(TEST).
struct_hd_identity_test_() ->
@@ -400,10 +500,12 @@ struct_hd_identity_test_() ->
Tests = jsx:decode(JSON, [return_maps]),
[
{iolist_to_binary(io_lib:format("~s: ~s", [filename:basename(File), Name])), fun() ->
+ io:format("expected json ~0p~n", [Expected0]),
Expected = expected_to_term(Expected0),
+ io:format("expected term: ~0p", [Expected]),
case HeaderType of
<<"dictionary">> ->
- {Expected, _Order} = parse_dictionary(iolist_to_binary(dictionary(Expected)));
+ Expected = parse_dictionary(iolist_to_binary(dictionary(Expected)));
<<"item">> ->
Expected = parse_item(iolist_to_binary(item(Expected)));
<<"list">> ->
diff --git a/src/cow_http_te.erl b/src/cow_http_te.erl
index 57d5167..e3473cf 100644
--- a/src/cow_http_te.erl
+++ b/src/cow_http_te.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2014-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2014-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_iolists.erl b/src/cow_iolists.erl
index dcb48d7..a5e75df 100644
--- a/src/cow_iolists.erl
+++ b/src/cow_iolists.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2017-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2017-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_link.erl b/src/cow_link.erl
index 8320297..b649786 100644
--- a/src/cow_link.erl
+++ b/src/cow_link.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2019, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2019-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_mimetypes.erl b/src/cow_mimetypes.erl
index 07fc69f..756e609 100644
--- a/src/cow_mimetypes.erl
+++ b/src/cow_mimetypes.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2013-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2013-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_mimetypes.erl.src b/src/cow_mimetypes.erl.src
index 2c57834..7cccdd3 100644
--- a/src/cow_mimetypes.erl.src
+++ b/src/cow_mimetypes.erl.src
@@ -1,4 +1,4 @@
-%% Copyright (c) 2013-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2013-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_multipart.erl b/src/cow_multipart.erl
index f418813..4d6d574 100644
--- a/src/cow_multipart.erl
+++ b/src/cow_multipart.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2014-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2014-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_qpack.erl b/src/cow_qpack.erl
new file mode 100644
index 0000000..027e29c
--- /dev/null
+++ b/src/cow_qpack.erl
@@ -0,0 +1,1581 @@
+%% Copyright (c) 2020-2024, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_qpack).
+-dialyzer(no_improper_lists).
+
+-export([init/1]).
+-export([init/3]).
+
+-export([decode_field_section/3]).
+-export([execute_encoder_instructions/2]).
+-export([decoder_cancel_stream/1]). %% @todo Use it.
+
+-export([encode_field_section/3]).
+-export([encode_field_section/4]).
+-export([execute_decoder_instructions/2]).
+-export([encoder_set_settings/3]).
+
+-record(state, {
+ %% Configuration.
+ %%
+	%% For the encoder these values will be set to the lower
+	%% of the configured value and the value from SETTINGS.
+
+ %% Whether the configured values can be used. The
+ %% decoder can always use the configured values.
+ %% The encoder must wait for the SETTINGS frame.
+ settings_received :: boolean(),
+
+ %% Maximum size of the table.
+ max_table_capacity = 0 :: non_neg_integer(),
+
+ %% Maximum number of potentially blocked streams.
+ max_blocked_streams = 0 :: non_neg_integer(),
+
+ %% Dynamic table.
+
+ %% The current max table capacity after the encoder
+ %% sent an instruction to change the capacity.
+ capacity = 0 :: non_neg_integer(),
+
+ %% The size of each entry is len(Name) + len(Value) + 32.
+ size = 0 :: non_neg_integer(),
+
+ %% The number of entries ever inserted in the dynamic table.
+	%% This value is used on the decoder's side to know whether
+	%% it can decode a field section, and on both sides to find
+ %% entries in the dynamic table.
+ insert_count = 0 :: non_neg_integer(),
+
+ %% The dynamic table. The first value is the size of the entry
+ %% and the second value the entry (Name, Value tuple). The
+ %% order of the entries is from newest to oldest.
+ %%
+ %% If 4 entries were inserted, the index of each entry would
+ %% be [3, 2, 1, 0]. If 4 entries were inserted and 1 of them
+ %% was later dropped, the index of each entry remaining would
+ %% be [3, 2, 1] and the insert_count value would be 3, allowing
+ %% us to know what index the newest entry is using.
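+	%%
+	%% For example (illustrative), after inserting ":authority:
+	%% www.example.com" and then ":path: /sample/path", with sizes
+	%% computed as len(Name) + len(Value) + 32, the table would be:
+	%%   [{49, {<<":path">>, <<"/sample/path">>}},
+	%%    {57, {<<":authority">>, <<"www.example.com">>}}]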
+ dyn_table = [] :: [{pos_integer(), {binary(), binary()}}],
+
+ %% Decoder-specific state.
+
+ %% We keep track of streams that are currently blocked
+ %% in a map for easy counting and removal. A stream may
+ %% be blocked at the beginning of the decoding process.
+ %% A stream may be unblocked after encoder instructions
+ %% have been processed.
+ blocked_streams = #{} :: #{cow_http3:stream_id() => true},
+
+ %% Encoder-specific state.
+
+ %% We keep track of the known received count of the
+ %% decoder (the insert_count it has that we know of)
+ %% so that we know when we can evict an entry we
+ %% inserted. We cannot evict an entry before it has
+ %% been acknowledged. The known received count can
+ %% also be used to avoid blocking.
+ known_received_count = 0 :: non_neg_integer(),
+
+ %% We keep track of the streams that have used the
+ %% dynamic table in order to increase the known
+ %% received count when the decoder acks a stream.
+ %% We only keep the insert_count value for a stream's
+ %% field section.
+ %%
+ %% Because a stream can send multiple field sections
+ %% (informational response, final response, trailers),
+ %% we use a list to keep track of the different sections.
+	%% A FIFO structure would be more appropriate, but we do
+	%% not expect many field sections per stream.
+ references = #{} :: #{cow_http3:stream_id() => [non_neg_integer()]},
+
+ %% Smallest absolute index the encoder will reference.
+ %% Indexes below may exist in the dynamic table but are
+ %% in the process of being phased out and will eventually
+ %% be evicted. Only duplicating these indexes is allowed.
+ draining_index = 0 :: non_neg_integer(),
+
+ %% Size of the dynamic table that is available for
+ %% eviction during encoding. Both this value and the
+ %% draining_index are computed at the start of encoding.
+ %% Note that for the encoder this cannot reach negatives,
+	%% Note that for the encoder this cannot become negative,
+	%% but it might for the decoder.
+}).
+
+-opaque state() :: #state{}.
+-export_type([state/0]).
+
+-type error() :: qpack_decompression_failed
+ | qpack_encoder_stream_error
+ | qpack_decoder_stream_error.
+-export_type([error/0]).
+
+-type encoder_opts() :: #{
+ huffman => boolean()
+}.
+-export_type([encoder_opts/0]).
+
+%-ifdef(TEST).
+%-include_lib("proper/include/proper.hrl").
+%-endif.
+
+-include("cow_hpack_common.hrl").
+
+%% State initialization.
+
+-spec init(decoder | encoder) -> state().
+
+init(Role) ->
+ init(Role, 4096, 0).
+
+-spec init(decoder | encoder, non_neg_integer(), non_neg_integer()) -> state().
+
+init(Role, MaxTableCapacity, MaxBlockedStreams) ->
+ #state{
+ settings_received=Role =:= decoder,
+ max_table_capacity=MaxTableCapacity,
+ max_blocked_streams=MaxBlockedStreams
+ }.
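+
+%% For example, a decoder with a 4096-byte maximum table capacity
+%% and no allowed blocked streams is created with init(decoder, 4096, 0).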
+
+%% Decoding.
+
+-spec decode_field_section(binary(), cow_http3:stream_id(), State)
+ -> {ok, cow_http:headers(), binary(), State}
+ | {blocked, State}
+ | {connection_error, error(), atom()}
+ when State::state().
+
+decode_field_section(Data, StreamID, State=#state{max_blocked_streams=MaxBlockedStreams,
+ insert_count=InsertCount, blocked_streams=BlockedStreams}) ->
+ {EncInsertCount, Rest} = dec_big_int(Data, 0, 0),
+ ReqInsertCount = decode_req_insert_count(EncInsertCount, State),
+ if
+ ReqInsertCount =< InsertCount ->
+ decode_field_section(Rest, StreamID, State, ReqInsertCount);
+ %% The stream is blocked and we do not allow that;
+ %% or there are already too many blocked streams.
+		map_size(BlockedStreams) >= MaxBlockedStreams ->
+ {connection_error, qpack_decompression_failed,
+ 'More blocked streams than configuration allows. (RFC9204 2.1.2)'};
+ %% The stream is blocked and we allow that.
+ %% The caller must keep the data and retry after
+ %% calling the execute_encoder_instructions function.
+ true ->
+ {blocked, State#state{blocked_streams=BlockedStreams#{StreamID => true}}}
+ end.
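+
+%% Illustrative caller flow (names hypothetical): on {blocked, State}
+%% the caller buffers Data for that stream; once data arrives on the
+%% encoder stream it calls execute_encoder_instructions/2 and then
+%% retries decode_field_section/3 with the buffered Data.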
+
+decode_field_section(<<S:1,Rest0/bits>>, StreamID,
+ State0=#state{blocked_streams=BlockedStreams}, ReqInsertCount) ->
+ State1 = State0#state{
+ %% The stream may have been blocked. Unblock it.
+ blocked_streams=maps:remove(StreamID, BlockedStreams),
+ %% Reset the draining_size. We don't use it, but don't
+ %% want the value to unnecessarily become a big int.
+ draining_size=0
+ },
+ {DeltaBase, Rest} = dec_int7(Rest0),
+ Base = case S of
+ 0 -> ReqInsertCount + DeltaBase;
+ 1 -> ReqInsertCount - DeltaBase - 1
+ end,
+ case decode(Rest, State1, Base, []) of
+ {ok, Headers, State} when ReqInsertCount =:= 0 ->
+ {ok, Headers, <<>>, State};
+ {ok, Headers, State} ->
+ {ok, Headers, enc_int7(StreamID, 2#1), State}
+ end.
+
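+%% The Required Insert Count is transmitted modulo twice the maximum
+%% number of table entries (RFC9204 4.5.1.1). A worked example with
+%% illustrative numbers: max_table_capacity=4096 gives MaxEntries=128
+%% and FullRange=256; with insert_count=10 and an encoded value of 4,
+%% MaxValue=138, MaxWrapped=0 and ReqInsertCount = 0 + 4 - 1 = 3.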
+decode_req_insert_count(0, _) ->
+ 0;
+decode_req_insert_count(EncInsertCount, #state{
+ max_table_capacity=MaxTableCapacity, insert_count=InsertCount}) ->
+ MaxEntries = MaxTableCapacity div 32,
+ FullRange = 2 * MaxEntries,
+ if
+ EncInsertCount > FullRange ->
+ {connection_error, qpack_decompression_failed,
+ 'EncInsertCount larger than maximum possible value. (RFC9204 4.5.1.1)'};
+ true ->
+ MaxValue = InsertCount + MaxEntries,
+ MaxWrapped = (MaxValue div FullRange) * FullRange,
+ ReqInsertCount = MaxWrapped + EncInsertCount - 1,
+ if
+ ReqInsertCount > MaxValue ->
+ if
+ ReqInsertCount =< FullRange ->
+ {connection_error, qpack_decompression_failed,
+ 'ReqInsertCount value larger than current maximum value. (RFC9204 4.5.1.1)'};
+ true ->
+ ReqInsertCount - FullRange
+ end;
+ ReqInsertCount =:= 0 ->
+ {connection_error, qpack_decompression_failed,
+ 'ReqInsertCount value of 0 must be encoded as 0. (RFC9204 4.5.1.1)'};
+ true ->
+ ReqInsertCount
+ end
+ end.
+
+decode(<<>>, State, _, Acc) ->
+ {ok, lists:reverse(Acc), State};
+%% Indexed field line.
+decode(<<2#1:1,T:1,Rest0/bits>>, State, Base, Acc) ->
+ {Index, Rest} = dec_int6(Rest0),
+ Entry = case T of
+ 0 -> table_get_dyn_pre_base(Index, Base, State);
+ 1 -> table_get_static(Index)
+ end,
+ decode(Rest, State, Base, [Entry|Acc]);
+%% Indexed field line with post-base index.
+decode(<<2#0001:4,Rest0/bits>>, State, Base, Acc) ->
+ {Index, Rest} = dec_int4(Rest0),
+ Entry = table_get_dyn_post_base(Index, Base, State),
+ decode(Rest, State, Base, [Entry|Acc]);
+%% Literal field line with name reference.
+decode(<<2#01:2,_N:1,T:1,Rest0/bits>>, State, Base, Acc) ->
+	%% @todo When N=1 the encoded field line MUST be encoded as a literal. Do we need to return metadata about this?
+ {NameIndex, <<H:1,Rest1/bits>>} = dec_int4(Rest0),
+ Name = case T of
+ 0 -> table_get_name_dyn_rel(NameIndex, State);
+ 1 -> table_get_name_static(NameIndex)
+ end,
+ {ValueLen, Rest2} = dec_int7(Rest1),
+ {Value, Rest} = maybe_dec_huffman(Rest2, ValueLen, H),
+ decode(Rest, State, Base, [{Name, Value}|Acc]);
+%% Literal field line with post-base name reference.
+decode(<<2#0000:4,_N:1,Rest0/bits>>, State, Base, Acc) ->
+	%% @todo When N=1 the encoded field line MUST be encoded as a literal. Do we need to return metadata about this?
+ {NameIndex, <<H:1,Rest1/bits>>} = dec_int3(Rest0),
+ Name = table_get_name_dyn_post_base(NameIndex, Base, State),
+ {ValueLen, Rest2} = dec_int7(Rest1),
+ {Value, Rest} = maybe_dec_huffman(Rest2, ValueLen, H),
+ decode(Rest, State, Base, [{Name, Value}|Acc]);
+%% Literal field line with literal name.
+decode(<<2#001:3,_N:1,NameH:1,Rest0/bits>>, State, Base, Acc) ->
+	%% @todo When N=1 the encoded field line MUST be encoded as a literal. Do we need to return metadata about this?
+ {NameLen, Rest1} = dec_int3(Rest0),
+ <<NameStr:NameLen/binary,ValueH:1,Rest2/bits>> = Rest1,
+ {Name, <<>>} = maybe_dec_huffman(NameStr, NameLen, NameH),
+ {ValueLen, Rest3} = dec_int7(Rest2),
+ {Value, Rest} = maybe_dec_huffman(Rest3, ValueLen, ValueH),
+ decode(Rest, State, Base, [{Name, Value}|Acc]).
+
+-spec execute_encoder_instructions(binary(), State)
+ -> {ok, binary(), State}
+ | {connection_error, qpack_encoder_stream_error, atom()}
+ when State::state().
+
+execute_encoder_instructions(Data, State) ->
+ execute_encoder_instructions(Data, State, 0).
+
+execute_encoder_instructions(<<>>, State, 0) ->
+ {ok, <<>>, State};
+execute_encoder_instructions(<<>>, State, Increment) ->
+ {ok, enc_int6(Increment, 2#00), State};
+%% Set dynamic table capacity.
+execute_encoder_instructions(<<2#001:3,Rest0/bits>>, State=#state{
+ max_table_capacity=MaxTableCapacity, capacity=Capacity0,
+ dyn_table=DynamicTable0}, Increment) ->
+ {Capacity, Rest} = dec_int5(Rest0),
+ if
+ %% Capacity larger than configured, or dynamic table
+ %% disabled when max_table_capacity=0.
+ Capacity > MaxTableCapacity ->
+ {connection_error, qpack_encoder_stream_error,
+ 'New table capacity higher than SETTINGS_QPACK_MAX_TABLE_CAPACITY. (RFC9204 3.2.3, RFC9204 4.3.1)'};
+ %% Table capacity was reduced. We must evict entries.
+ Capacity < Capacity0 ->
+ {DynamicTable, Size} = table_evict(DynamicTable0, Capacity, 0, []),
+ execute_encoder_instructions(Rest, State#state{capacity=Capacity,
+ size=Size, dyn_table=DynamicTable}, Increment);
+ %% Table capacity equal or higher than previous.
+ true ->
+ execute_encoder_instructions(Rest,
+ State#state{capacity=Capacity}, Increment)
+ end;
+%% Insert with name reference.
+execute_encoder_instructions(<<2#1:1,T:1,Rest0/bits>>, State, Increment) ->
+ {NameIndex, <<H:1,Rest1/bits>>} = dec_int6(Rest0),
+ Name = case T of
+ 0 -> table_get_name_dyn_rel(NameIndex, State);
+ 1 -> table_get_name_static(NameIndex)
+ end,
+ {ValueLen, Rest2} = dec_int7(Rest1),
+ {Value, Rest} = maybe_dec_huffman(Rest2, ValueLen, H),
+ execute_insert_instruction(Rest, State, Increment, {Name, Value});
+%% Insert with literal name.
+execute_encoder_instructions(<<2#01:2,NameH:1,Rest0/bits>>, State, Increment) ->
+ {NameLen, Rest1} = dec_int5(Rest0),
+ {Name, <<ValueH:1,Rest2/bits>>} = maybe_dec_huffman(Rest1, NameLen, NameH),
+ {ValueLen, Rest3} = dec_int7(Rest2),
+ {Value, Rest} = maybe_dec_huffman(Rest3, ValueLen, ValueH),
+ execute_insert_instruction(Rest, State, Increment, {Name, Value});
+%% Duplicate.
+execute_encoder_instructions(<<2#000:3,Rest0/bits>>, State, Increment) ->
+ {Index, Rest} = dec_int5(Rest0),
+ Entry = table_get_dyn_rel(Index, State),
+ execute_insert_instruction(Rest, State, Increment, Entry).
+
+execute_insert_instruction(Rest, State0, Increment, Entry) ->
+ case table_insert(Entry, State0) of
+ {ok, State} ->
+ execute_encoder_instructions(Rest, State, Increment + 1);
+ Error = {connection_error, _, _} ->
+ Error
+ end.
+
+%% @todo Export / spec.
+
+decoder_cancel_stream(StreamID) ->
+ enc_int6(StreamID, 2#01).
+
+dec_int3(<<2#111:3,Rest/bits>>) ->
+ dec_big_int(Rest, 7, 0);
+dec_int3(<<Int:3,Rest/bits>>) ->
+ {Int, Rest}.
+
+dec_int4(<<2#1111:4,Rest/bits>>) ->
+ dec_big_int(Rest, 15, 0);
+dec_int4(<<Int:4,Rest/bits>>) ->
+ {Int, Rest}.
+
+dec_int6(<<2#111111:6,Rest/bits>>) ->
+ dec_big_int(Rest, 63, 0);
+dec_int6(<<Int:6,Rest/bits>>) ->
+ {Int, Rest}.
+
+dec_int7(<<2#1111111:7,Rest/bits>>) ->
+ dec_big_int(Rest, 127, 0);
+dec_int7(<<Int:7,Rest/bits>>) ->
+ {Int, Rest}.
+
+maybe_dec_huffman(Data, ValueLen, 0) ->
+ <<Value:ValueLen/binary,Rest/bits>> = Data,
+ {Value, Rest};
+maybe_dec_huffman(Data, ValueLen, 1) ->
+ dec_huffman(Data, ValueLen, 0, <<>>).
+
+-ifdef(TEST).
+appendix_b_decoder_test() ->
+ %% Stream: 0
+ {ok, [
+ {<<":path">>, <<"/index.html">>}
+ ], <<>>, DecState0} = decode_field_section(<<
+ 16#0000:16,
+ 16#510b:16, 16#2f69:16, 16#6e64:16, 16#6578:16,
+ 16#2e68:16, 16#746d:16, 16#6c
+ >>, 0, init(decoder, 4096, 0)),
+ #state{
+ capacity=0,
+ size=0,
+ insert_count=0,
+ dyn_table=[]
+ } = DecState0,
+ %% Stream: Encoder
+ {ok, EncData1, DecState1} = execute_encoder_instructions(<<
+ 16#3fbd01:24,
+ 16#c00f:16, 16#7777:16, 16#772e:16, 16#6578:16,
+ 16#616d:16, 16#706c:16, 16#652e:16, 16#636f:16,
+ 16#6d,
+ 16#c10c:16, 16#2f73:16, 16#616d:16, 16#706c:16,
+ 16#652f:16, 16#7061:16, 16#7468:16
+ >>, DecState0),
+ <<2#00:2,2:6>> = EncData1,
+ #state{
+ capacity=220,
+ size=106,
+ insert_count=2,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = DecState1,
+ %% Stream: 4
+ {ok, [
+ {<<":authority">>, <<"www.example.com">>},
+ {<<":path">>, <<"/sample/path">>}
+ ], <<16#84>>, DecState2} = decode_field_section(<<
+ 16#0381:16,
+ 16#10,
+ 16#11
+ >>, 4, DecState1),
+ DecState1 = DecState2,
+ %% Stream: Encoder
+ {ok, EncData3, DecState3} = execute_encoder_instructions(<<
+ 16#4a63:16, 16#7573:16, 16#746f:16, 16#6d2d:16,
+ 16#6b65:16, 16#790c:16, 16#6375:16, 16#7374:16,
+ 16#6f6d:16, 16#2d76:16, 16#616c:16, 16#7565:16
+ >>, DecState2),
+ <<2#00:2,1:6>> = EncData3,
+ #state{
+ capacity=220,
+ size=160,
+ insert_count=3,
+ dyn_table=[
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = DecState3,
+ %% Stream: Encoder
+ {ok, EncData4, DecState4} = execute_encoder_instructions(<<
+ 16#02
+ >>, DecState3),
+ <<2#00:2,1:6>> = EncData4,
+ #state{
+ capacity=220,
+ size=217,
+ insert_count=4,
+ dyn_table=[
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = DecState4,
+ %% Stream: 8
+ %%
+	%% Note that this field section is not actually received by the
+	%% decoder, so we ignore the resulting decoder state before continuing.
+ {ok, [
+ {<<":authority">>, <<"www.example.com">>},
+ {<<":path">>, <<"/">>},
+ {<<"custom-key">>, <<"custom-value">>}
+ ], <<16#88>>, IgnoredDecState} = decode_field_section(<<
+ 16#0500:16,
+ 16#80,
+ 16#c1,
+ 16#81
+ >>, 8, DecState4),
+ %% Note that the state did not change anyway.
+ DecState4 = IgnoredDecState,
+ %% Stream: Decoder - Stream Cancellation (Stream=8)
+ <<16#48>> = decoder_cancel_stream(8),
+ {ok, EncData5, DecState5} = execute_encoder_instructions(<<
+ 16#810d:16, 16#6375:16, 16#7374:16, 16#6f6d:16,
+ 16#2d76:16, 16#616c:16, 16#7565:16, 16#32
+ >>, DecState4),
+ <<2#00:2,1:6>> = EncData5,
+ #state{
+ capacity=220,
+ size=215,
+ insert_count=5,
+ dyn_table=[
+ {55, {<<"custom-key">>, <<"custom-value2">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}}
+ ]
+ } = DecState5,
+ ok.
+-endif.
+
+%% Encoding.
+
+-spec encode_field_section(cow_http:headers(), cow_http3:stream_id(), State)
+ -> {ok, iolist(), iolist(), State} when State::state().
+
+%% @todo It would be good to know the encoder stream's flow control
+%% window so that we can avoid writing to it when full. Opts?
+encode_field_section(Headers, StreamID, State0) ->
+ encode_field_section(Headers, StreamID, State0, #{}).
+
+-spec encode_field_section(cow_http:headers(), cow_http3:stream_id(), State, encoder_opts())
+ -> {ok, iolist(), iolist(), State} when State::state().
+
+encode_field_section(Headers, StreamID, State0=#state{
+ max_table_capacity=MaxTableCapacity, insert_count=InsertCount,
+ references=Refs0}, Opts) ->
+ State1 = encode_update_drain_info(State0),
+ Base = InsertCount + 1,
+ {ReqInsertCount, EncData, Data, State} = encode(
+ Headers, StreamID, State1,
+ huffman_opt(Opts), 0, Base, [], []),
+ case ReqInsertCount of
+ 0 ->
+ {ok, [<<0:16>>|Data], EncData, State};
+ _ ->
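+			%% The Required Insert Count is encoded modulo twice the
+			%% maximum number of entries the table can hold, plus 1
+			%% (RFC 9204 4.5.1.1).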
+ MaxEntries = MaxTableCapacity div 32,
+ EncInsertCount = (ReqInsertCount rem (2 * MaxEntries)) + 1,
+ {S, DeltaBase} = if
+ %% We inserted new entries.
+ ReqInsertCount > Base ->
+ {2#1, ReqInsertCount - Base};
+ %% We only used existing entries.
+ ReqInsertCount =< Base ->
+ {2#0, ReqInsertCount - Base}
+ end,
+ %% Save the reference to avoid draining entries too quickly.
+ Refs = case Refs0 of
+ #{StreamID := ICs} ->
+ Refs0#{StreamID => [ReqInsertCount|ICs]};
+ _ ->
+ Refs0#{StreamID => [ReqInsertCount]}
+ end,
+ {ok, [enc_big_int(EncInsertCount, <<>>), enc_int7(DeltaBase, S)|Data], EncData,
+ State#state{references=Refs}}
+ end.
+
+%% We check how many entries we can evict. The result
+%% takes the form of a draining_index (the oldest entry
+%% the encoder may still reference) as well as a
+%% draining_size (how much space can be reclaimed by evicting).
+%%
+%% We first look at streams that have not been acknowledged
+%% and find the smallest insert_count value among them. We
+%% cannot evict any entry that is newer than or equal to
+%% that value.
+%%
+%% Then we also need to make sure we don't evict too much
+%% from the table.
+%%
+%% Finally we go over the dynamic table to count how much
+%% we can actually drain and what the draining index really is.
+encode_update_drain_info(State=#state{max_table_capacity=MaxCapacity,
+ insert_count=InsertCount, dyn_table=DynTable, references=Refs}) ->
+ PendingInsertCount = if
+ %% When we don't use the dynamic table, or we didn't insert
+ %% anything yet, there are no references. We can drain
+ %% everything but are still constrained by the max draining size.
+ Refs =:= #{} ->
+ InsertCount;
+ true ->
+ maps:fold(fun(_, ICs, V) ->
+ IC = hd(lists:reverse(ICs)),
+ case V of
+ undefined -> IC;
+ _ -> min(IC, V)
+ end
+ end, undefined, Refs)
+ end,
+	%% We use a simple formula for calculating the maximum
+	%% draining size, found in nginx: we allow evicting up to
+	%% 1/8th of the maximum table capacity or 512 bytes,
+	%% whichever is smaller. When the maximum table capacity
+	%% is small this formula may produce a value too small
+	%% to drain anything, so we use 64 bytes as a minimum.
+ MaxDrainingSize0 = min(512, MaxCapacity div 8),
+ MaxDrainingSize = if
+ MaxDrainingSize0 < 64 -> 64;
+ true -> MaxDrainingSize0
+ end,
+ {DrainingIndex, DrainingSize} =
+ encode_update_drain_loop(lists:reverse(DynTable),
+ InsertCount - length(DynTable), PendingInsertCount,
+ 0, MaxDrainingSize),
+ State#state{
+ draining_index=DrainingIndex,
+ draining_size=DrainingSize
+ }.
+
+%% We go over the dynamic table in reverse order. We stop
+%% when we either reach the PendingInsertCount value or get
+%% above MaxDrainingSize. We can never run past the end of
+%% the dynamic table because PendingInsertCount never exceeds
+%% the insert count; at worst we stop exactly at the end.
+encode_update_drain_loop(_, Index, PendingIndex, Size, _)
+ when Index =:= PendingIndex ->
+ {Index, Size};
+encode_update_drain_loop([{EntrySize, _}|_], Index, _, Size, MaxSize)
+ when Size + EntrySize > MaxSize ->
+ {Index, Size};
+encode_update_drain_loop([{EntrySize, _}|Tail], Index, PendingIndex, Size, MaxSize) ->
+ encode_update_drain_loop(Tail, Index + 1, PendingIndex, Size + EntrySize, MaxSize).
+
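+-ifdef(TEST).
+%% A minimal sketch of the drain loop above: three 34-byte
+%% entries (1-byte name + 1-byte value + 32 bytes of overhead),
+%% no references (PendingInsertCount equals the insert count
+%% of 3) and an 80-byte draining budget. Only the two oldest
+%% entries fit under the budget, so the draining index is 2
+%% and 68 bytes may be reclaimed by evicting.
+encode_update_drain_loop_test() ->
+	DynTable = [
+		{34, {<<"c">>, <<"3">>}},
+		{34, {<<"b">>, <<"2">>}},
+		{34, {<<"a">>, <<"1">>}}
+	],
+	{2, 68} = encode_update_drain_loop(lists:reverse(DynTable), 0, 3, 0, 80),
+	ok.
+-endif.
+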
+encode([], _StreamID, State, _HuffmanOpt,
+ ReqInsertCount, _Base, EncAcc, Acc) ->
+ {ReqInsertCount, lists:reverse(EncAcc), lists:reverse(Acc), State};
+encode([{Name, Value0}|Tail], StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc, Acc) ->
+	%% We conditionally call iolist_to_binary/1 because doing so
+	%% gives a small but noticeable speed improvement.
+	%% (Or at least it did for cow_hpack.)
+ Value = if
+ is_binary(Value0) -> Value0;
+ true -> iolist_to_binary(Value0)
+ end,
+ Entry = {Name, Value},
+ encode_static([Entry|Tail], StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc, Acc).
+
+encode_static([Entry|Tail], StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc, Acc) ->
+ case table_find_static(Entry) of
+ not_found ->
+ encode_dyn([Entry|Tail], StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc, Acc);
+ StaticIndex ->
+ encode(Tail, StreamID, State, HuffmanOpt,
+ ReqInsertCount, Base, EncAcc,
+ %% Indexed Field Line. T=1 (static).
+ [enc_int6(StaticIndex, 2#11)|Acc])
+ end.
+
+encode_dyn([Entry|Tail], StreamID, State0=#state{draining_index=DrainingIndex},
+ HuffmanOpt, ReqInsertCount0, Base, EncAcc, Acc) ->
+ case table_find_dyn(Entry, State0) of
+ not_found ->
+ encode_static_name([Entry|Tail], StreamID, State0, HuffmanOpt,
+ ReqInsertCount0, Base, EncAcc, Acc);
+	%% When the index is below the draining index and there is
+	%% enough space in the table for duplicating the value, we do
+	%% that and use the duplicated index. If we can't, then we
+	%% must not use the dynamic index for the field.
+ DynIndex when DynIndex < DrainingIndex ->
+ case encode_can_insert(Entry, State0) of
+ {true, EncInstr, State1} ->
+ {ok, State} = table_insert(Entry, State1),
+ #state{insert_count=ReqInsertCount} = State,
+ %% We must reference the relative index of the entry we duplicated
+ %% before we duplicated it. The newest entry starts at 0. If we
+ %% have 3 entries in the table, the oldest one will have a relative
+ %% index of 2. Because we already inserted the duplicate, our
+				%% ReqInsertCount has 1 added, so for a table that previously
+				%% had 3 entries we end up with a ReqInsertCount of 4. This means we
+ %% have to remove 2 from the difference to find the relative index.
+ DynIndexRel = ReqInsertCount - DynIndex - 2,
+ encode(Tail, StreamID, State, HuffmanOpt, ReqInsertCount, Base,
+ %% Duplicate.
+ [[EncInstr|enc_int5(DynIndexRel, 2#000)]|EncAcc],
+ %% Indexed Field Line. T=0 (dynamic).
+ [enc_int6(Base - ReqInsertCount, 2#10)|Acc]);
+ false ->
+ encode_static_name([Entry|Tail], StreamID, State0, HuffmanOpt,
+ ReqInsertCount0, Base, EncAcc, Acc)
+ end;
+ DynIndex ->
+ ReqInsertCount = max(ReqInsertCount0, DynIndex),
+ encode(Tail, StreamID, State0, HuffmanOpt, ReqInsertCount, Base, EncAcc,
+ %% Indexed Field Line. T=0 (dynamic).
+ [enc_int6(Base - DynIndex - 1, 2#10)|Acc])
+ end.
+
+encode_static_name([Entry = {Name, Value}|Tail], StreamID, State0, HuffmanOpt,
+ ReqInsertCount0, Base, EncAcc, Acc) ->
+ case table_find_name_static(Name) of
+ not_found ->
+ encode_dyn_name([Entry|Tail], StreamID, State0, HuffmanOpt,
+ ReqInsertCount0, Base, EncAcc, Acc);
+ StaticNameIndex ->
+ case encode_can_insert(Entry, State0) of
+ {true, EncInstr, State1} ->
+ {ok, State} = table_insert(Entry, State1),
+ #state{insert_count=ReqInsertCount} = State,
+ PostBaseIndex = length(EncAcc),
+ encode(Tail, StreamID, State, HuffmanOpt, ReqInsertCount, Base,
+ %% Insert with Name Reference. T=1 (static).
+ [[EncInstr, enc_int6(StaticNameIndex, 2#11)|enc_str(Value, HuffmanOpt)]
+ |EncAcc],
+ %% Indexed Field Line with Post-Base Index.
+ [enc_int4(PostBaseIndex, 2#0001)|Acc]);
+ false ->
+ encode(Tail, StreamID, State0, HuffmanOpt, ReqInsertCount0, Base, EncAcc,
+ %% Literal Field Line with Name Reference. N=0. T=1 (static).
+ [[enc_int4(StaticNameIndex, 2#0101)|enc_str(Value, HuffmanOpt)]|Acc])
+ end
+ end.
+
+encode_dyn_name([Entry = {Name, Value}|Tail], StreamID,
+ State0=#state{draining_index=DrainingIndex},
+ HuffmanOpt, ReqInsertCount0, Base, EncAcc, Acc) ->
+ case table_find_name_dyn(Name, State0) of
+ %% We can reference the dynamic name.
+ DynIndex when is_integer(DynIndex), DynIndex >= DrainingIndex ->
+ case encode_can_insert(Entry, State0) of
+ {true, EncInstr, State1} ->
+ {ok, State} = table_insert(Entry, State1),
+ #state{insert_count=ReqInsertCount} = State,
+ %% See comment in encode_dyn for why we remove 2.
+ DynIndexRel = ReqInsertCount - DynIndex - 2,
+ PostBaseIndex = length(EncAcc),
+ encode(Tail, StreamID, State, HuffmanOpt, ReqInsertCount, Base,
+ %% Insert with Name Reference. T=0 (dynamic).
+ [[EncInstr, enc_int6(DynIndexRel, 2#10)|enc_str(Value, HuffmanOpt)]
+ |EncAcc],
+ %% Indexed Field Line with Post-Base Index.
+ [enc_int4(PostBaseIndex, 2#0001)|Acc]);
+ false ->
+ encode(Tail, StreamID, State0, HuffmanOpt, ReqInsertCount0, Base, EncAcc,
+ %% Literal Field Line with Name Reference. N=0. T=0 (dynamic).
+ [[enc_int4(DynIndex, 2#0100)|enc_str(Value, HuffmanOpt)]|Acc])
+ end;
+	%% When there is no name to reference, or the name
+	%% is found below the draining index, we do not
+	%% attempt to refer to it.
+ _ ->
+ case encode_can_insert(Entry, State0) of
+ {true, EncInstr, State1} ->
+ {ok, State} = table_insert(Entry, State1),
+ #state{insert_count=ReqInsertCount} = State,
+ PostBaseIndex = length(EncAcc),
+ encode(Tail, StreamID, State, HuffmanOpt, ReqInsertCount, Base,
+ %% Insert with Literal Name.
+ [[EncInstr, enc_str6(Name, HuffmanOpt, 2#01)|enc_str(Value, HuffmanOpt)]
+ |EncAcc],
+ %% Indexed Field Line with Post-Base Index.
+ [enc_int4(PostBaseIndex, 2#0001)|Acc]);
+ false ->
+ encode(Tail, StreamID, State0, HuffmanOpt, ReqInsertCount0, Base, EncAcc,
+ %% Literal Field Line with Literal Name. N=0.
+ [[enc_str4(Name, HuffmanOpt, 2#0010)|enc_str(Value, HuffmanOpt)]|Acc])
+ end
+ end.
+
+%% @todo We should make sure we have a large enough flow control window.
+%%
+%% We can never insert before receiving the SETTINGS frame.
+encode_can_insert(_, #state{settings_received=false}) ->
+ false;
+encode_can_insert({Name, Value}, State=#state{
+ max_table_capacity=MaxCapacity, capacity=Capacity,
+ size=Size, draining_size=DrainingSize}) ->
+ EntrySize = byte_size(Name) + byte_size(Value) + 32,
+ if
+ %% We have enough space in the current capacity,
+ %% without having to drain entries.
+ EntrySize + Size =< Capacity ->
+ {true, <<>>, State};
+ %% We have enough space if we increase the capacity.
+ %% We prefer to first increase the capacity to the
+ %% maximum before we start draining entries.
+ EntrySize + Size =< MaxCapacity ->
+ {true, enc_int5(MaxCapacity, 2#001),
+ State#state{capacity=MaxCapacity}};
+ %% We are already at max capacity and have enough
+ %% space if we drain entries.
+ EntrySize + Size =< Capacity + DrainingSize, Capacity =:= MaxCapacity ->
+ {true, <<>>, State};
+ %% We are not at max capacity. We have enough space
+ %% if we both increase the capacity and drain entries.
+ EntrySize + Size =< MaxCapacity + DrainingSize ->
+ {true, enc_int5(MaxCapacity, 2#001),
+ State#state{capacity=MaxCapacity}};
+ true ->
+ false
+ end.
+
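+-ifdef(TEST).
+%% A quick sanity check (an illustrative addition, not an RFC
+%% test vector), assuming a freshly initialized encoder state:
+%% no insertion may occur before the peer's SETTINGS frame
+%% has been received.
+encode_can_insert_before_settings_test() ->
+	false = encode_can_insert({<<"a">>, <<"b">>}, init(encoder, 220, 0)),
+	ok.
+-endif.
+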
+-spec execute_decoder_instructions(binary(), State)
+ -> {ok, State} | {connection_error, qpack_decoder_stream_error, atom()}
+ when State::state().
+execute_decoder_instructions(<<>>, State) ->
+ {ok, State};
+%% Section acknowledgement.
+%% We remove one reference and, if needed, increase the known received count.
+execute_decoder_instructions(<<2#1:1,Rest0/bits>>, State=#state{
+ known_received_count=KnownReceivedCount0, references=Refs}) ->
+ {StreamID, Rest} = dec_int7(Rest0),
+ case Refs of
+ #{StreamID := [InsertCount]} ->
+ KnownReceivedCount = max(KnownReceivedCount0, InsertCount),
+ execute_decoder_instructions(Rest, State#state{
+ known_received_count=KnownReceivedCount,
+ references=maps:remove(StreamID, Refs)});
+ #{StreamID := InsertCounts} ->
+ [InsertCount|InsertCountsTail] = lists:reverse(InsertCounts),
+ KnownReceivedCount = max(KnownReceivedCount0, InsertCount),
+ execute_decoder_instructions(Rest, State#state{
+ known_received_count=KnownReceivedCount,
+ references=Refs#{StreamID => lists:reverse(InsertCountsTail)}});
+ _ ->
+ {connection_error, qpack_decoder_stream_error,
+ 'Acknowledgement received for stream with no pending sections. (RFC9204 4.4.1)'}
+ end;
+%% Stream cancellation.
+%% We drop all references for the given stream.
+execute_decoder_instructions(<<2#01:2,Rest0/bits>>, State=#state{references=Refs}) ->
+ {StreamID, Rest} = dec_int6(Rest0),
+ case Refs of
+ #{StreamID := _} ->
+ execute_decoder_instructions(Rest, State#state{
+ references=maps:remove(StreamID, Refs)});
+ %% It is not an error for the reference to not exist.
+ %% The dynamic table may not have been used for this
+ %% stream.
+ _ ->
+ execute_decoder_instructions(Rest, State)
+ end;
+%% Insert count increment.
+%% We increase the known received count.
+execute_decoder_instructions(<<2#00:2,Rest0/bits>>, State=#state{
+ known_received_count=KnownReceivedCount}) ->
+ {Increment, Rest} = dec_int6(Rest0),
+ execute_decoder_instructions(Rest, State#state{
+ known_received_count=KnownReceivedCount + Increment}).
+
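+-ifdef(TEST).
+%% A minimal sketch of the error handling, assuming a freshly
+%% initialized state that carries no references: cancelling a
+%% stream (16#44, stream 4) that holds no references is not an
+%% error, while acknowledging a section (16#84, stream 4) is.
+decoder_instructions_no_refs_test() ->
+	State0 = init(encoder, 220, 0),
+	{ok, State0} = execute_decoder_instructions(<<16#44>>, State0),
+	{connection_error, qpack_decoder_stream_error, _} =
+		execute_decoder_instructions(<<16#84>>, State0),
+	ok.
+-endif.
+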
+%% Inform the encoder of the relevant SETTINGS from the decoder.
+%% The encoder will keep the smaller of the value it has configured
+%% and the value it received through SETTINGS. Should a value be
+%% absent from the SETTINGS, 0 must be given.
+
+-spec encoder_set_settings(non_neg_integer(), non_neg_integer(), state()) -> state().
+
+encoder_set_settings(MaxTableCapacity, MaxBlockedStreams, State=#state{
+ max_table_capacity=MaxTableCapacityConfigured,
+ max_blocked_streams=MaxBlockedStreamsConfigured}) ->
+ State#state{
+ settings_received=true,
+ max_table_capacity=min(MaxTableCapacity, MaxTableCapacityConfigured),
+ max_blocked_streams=min(MaxBlockedStreams, MaxBlockedStreamsConfigured)
+ }.
+
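+-ifdef(TEST).
+%% A minimal sketch of that negotiation, assuming init/3's third
+%% argument configures the max_blocked_streams limit: with a
+%% 220-byte table and 16 blocked streams configured locally,
+%% receiving 4096 and 10 through SETTINGS keeps 220 (our value)
+%% and 10 (the peer's).
+encoder_set_settings_test() ->
+	#state{settings_received=true, max_table_capacity=220,
+		max_blocked_streams=10} =
+		encoder_set_settings(4096, 10, init(encoder, 220, 16)),
+	ok.
+-endif.
+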
+huffman_opt(#{huffman := false}) -> no_huffman;
+huffman_opt(_) -> huffman.
+
+enc_int3(Int, Prefix) when Int < 7 ->
+ <<Prefix:5, Int:3>>;
+enc_int3(Int, Prefix) ->
+ enc_big_int(Int - 7, <<Prefix:5, 2#111:3>>).
+
+enc_int4(Int, Prefix) when Int < 15 ->
+ <<Prefix:4, Int:4>>;
+enc_int4(Int, Prefix) ->
+ enc_big_int(Int - 15, <<Prefix:4, 2#1111:4>>).
+
+enc_str4(Str, huffman, Prefix) ->
+ Str2 = enc_huffman(Str, <<>>),
+ [enc_int3(byte_size(Str2), Prefix * 2 + 2#1)|Str2];
+enc_str4(Str, no_huffman, Prefix) ->
+ [enc_int3(byte_size(Str), Prefix * 2 + 2#0)|Str].
+
+enc_str6(Str, huffman, Prefix) ->
+ Str2 = enc_huffman(Str, <<>>),
+ [enc_int5(byte_size(Str2), Prefix * 2 + 2#1)|Str2];
+enc_str6(Str, no_huffman, Prefix) ->
+ [enc_int5(byte_size(Str), Prefix * 2 + 2#0)|Str].
+
+-ifdef(TEST).
+%% This function is a good starting point to let the calling
+%% process insert entries in the dynamic table outside of
+%% encoding a field section. To be usable more broadly it
+%% would need to handle the case where a static name is
+%% found, and also consider how it should be used: do we
+%% have capacity in the table? We have none before receiving
+%% the SETTINGS frame. Until then it is restricted to testing.
+encoder_insert_entry(Entry={Name, Value}, State0, Opts) ->
+ {ok, State} = table_insert(Entry, State0),
+ HuffmanOpt = huffman_opt(Opts),
+ case table_find_name_static(Name) of
+ not_found ->
+ case table_find_name_dyn(Name, State0) of
+ not_found ->
+ %% Insert with Literal Name.
+ {ok, [enc_str6(Name, HuffmanOpt, 2#01)|enc_str(Value, HuffmanOpt)], State};
+ DynNameIndex ->
+ #state{insert_count=ReqInsertCount} = State,
+ %% See comment in encode_dyn for why we remove 2.
+ DynNameIndexRel = ReqInsertCount - DynNameIndex - 2,
+ %% Insert with Name Reference. T=0 (dynamic).
+ {ok, [enc_int6(DynNameIndexRel, 2#10)|enc_str(Value, HuffmanOpt)], State}
+ end
+ end.
+
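+%% A minimal illustrative check of string encoding: the H (Huffman)
+%% flag is appended to the caller's prefix, which is shifted left by
+%% one bit; without Huffman coding the string follows its length verbatim.
+enc_str6_test() ->
+	<<2#010:3, 3:5, "abc">> =
+		iolist_to_binary(enc_str6(<<"abc">>, no_huffman, 2#01)),
+	<<2#011:3, HuffLen:5, _:HuffLen/binary>> =
+		iolist_to_binary(enc_str6(<<"abc">>, huffman, 2#01)),
+	ok.
+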
+appendix_b_encoder_test() ->
+ %% We limit the encoder to 220 bytes for table capacity.
+ EncState0 = init(encoder, 220, 0),
+ %% Stream: 0
+ {ok, Data1, EncData1, EncState1} = encode_field_section([
+ {<<":path">>, <<"/index.html">>}
+ ], 0, EncState0, #{huffman => false}),
+ <<>> = iolist_to_binary(EncData1),
+ <<
+ 16#0000:16,
+ 16#510b:16, 16#2f69:16, 16#6e64:16, 16#6578:16,
+ 16#2e68:16, 16#746d:16, 16#6c
+ >> = iolist_to_binary(Data1),
+ #state{
+ capacity=0,
+ size=0,
+ insert_count=0,
+ dyn_table=[]
+ } = EncState1,
+ %% Simulate receiving of the SETTINGS frame enabling the dynamic table.
+ EncState2 = encoder_set_settings(4096, 0, EncState1),
+ #state{
+ settings_received=true,
+ max_table_capacity=220,
+ capacity=0
+ } = EncState2,
+ %% Stream: 4 (and Encoder)
+ {ok, Data3, EncData3, EncState3} = encode_field_section([
+ {<<":authority">>, <<"www.example.com">>},
+ {<<":path">>, <<"/sample/path">>}
+ ], 4, EncState2, #{huffman => false}),
+ <<
+ 16#3fbd01:24,
+ 16#c00f:16, 16#7777:16, 16#772e:16, 16#6578:16,
+ 16#616d:16, 16#706c:16, 16#652e:16, 16#636f:16,
+ 16#6d,
+ 16#c10c:16, 16#2f73:16, 16#616d:16, 16#706c:16,
+ 16#652f:16, 16#7061:16, 16#7468:16
+ >> = iolist_to_binary(EncData3),
+ <<
+ 16#0381:16,
+ 16#10,
+ 16#11
+ >> = iolist_to_binary(Data3),
+ #state{
+ capacity=220,
+ size=106,
+ insert_count=2,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState3,
+ %% Stream: Decoder
+ {ok, EncState4} = execute_decoder_instructions(<<16#84>>, EncState3),
+ #state{
+ capacity=220,
+ size=106,
+ insert_count=2,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState4,
+ %% Stream: Encoder
+ {ok, EncData5, EncState5} = encoder_insert_entry(
+ {<<"custom-key">>, <<"custom-value">>},
+ EncState4, #{huffman => false}),
+ <<
+ 16#4a63:16, 16#7573:16, 16#746f:16, 16#6d2d:16,
+ 16#6b65:16, 16#790c:16, 16#6375:16, 16#7374:16,
+ 16#6f6d:16, 16#2d76:16, 16#616c:16, 16#7565:16
+ >> = iolist_to_binary(EncData5),
+ #state{
+ capacity=220,
+ size=160,
+ insert_count=3,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState5,
+ %% Stream: Decoder
+ {ok, EncState6} = execute_decoder_instructions(<<16#01>>, EncState5),
+ #state{
+ capacity=220,
+ size=160,
+ insert_count=3,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState6,
+ %% Stream: 8 (and Encoder)
+ {ok, Data7, EncData7, EncState7} = encode_field_section([
+ {<<":authority">>, <<"www.example.com">>},
+ {<<":path">>, <<"/">>},
+ {<<"custom-key">>, <<"custom-value">>}
+ ], 8, EncState6),
+ <<16#02>> = iolist_to_binary(EncData7),
+ <<
+ 16#0500:16,
+ 16#80,
+ 16#c1,
+ 16#81
+ >> = iolist_to_binary(Data7),
+ #state{
+ capacity=220,
+ size=217,
+ insert_count=4,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState7,
+ %% Stream: Decoder
+ {ok, EncState8} = execute_decoder_instructions(<<16#48>>, EncState7),
+ #state{
+ capacity=220,
+ size=217,
+ insert_count=4,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}}
+ ]
+ } = EncState8,
+ %% Stream: Encoder
+ {ok, EncData9, EncState9} = encoder_insert_entry(
+ {<<"custom-key">>, <<"custom-value2">>},
+ EncState8, #{huffman => false}),
+ <<
+ 16#810d:16, 16#6375:16, 16#7374:16, 16#6f6d:16,
+ 16#2d76:16, 16#616c:16, 16#7565:16, 16#32
+ >> = iolist_to_binary(EncData9),
+ #state{
+ capacity=220,
+ size=215,
+ insert_count=5,
+ %% The dynamic table is in reverse order.
+ dyn_table=[
+ {55, {<<"custom-key">>, <<"custom-value2">>}},
+ {57, {<<":authority">>, <<"www.example.com">>}},
+ {54, {<<"custom-key">>, <<"custom-value">>}},
+ {49, {<<":path">>, <<"/sample/path">>}}
+ ]
+ } = EncState9,
+ ok.
+-endif.
+
+%% Static and dynamic tables.
+
+table_find_static({<<":authority">>, <<>>}) -> 0;
+table_find_static({<<":path">>, <<"/">>}) -> 1;
+table_find_static({<<"age">>, <<"0">>}) -> 2;
+table_find_static({<<"content-disposition">>, <<>>}) -> 3;
+table_find_static({<<"content-length">>, <<"0">>}) -> 4;
+table_find_static({<<"cookie">>, <<>>}) -> 5;
+table_find_static({<<"date">>, <<>>}) -> 6;
+table_find_static({<<"etag">>, <<>>}) -> 7;
+table_find_static({<<"if-modified-since">>, <<>>}) -> 8;
+table_find_static({<<"if-none-match">>, <<>>}) -> 9;
+table_find_static({<<"last-modified">>, <<>>}) -> 10;
+table_find_static({<<"link">>, <<>>}) -> 11;
+table_find_static({<<"location">>, <<>>}) -> 12;
+table_find_static({<<"referer">>, <<>>}) -> 13;
+table_find_static({<<"set-cookie">>, <<>>}) -> 14;
+table_find_static({<<":method">>, <<"CONNECT">>}) -> 15;
+table_find_static({<<":method">>, <<"DELETE">>}) -> 16;
+table_find_static({<<":method">>, <<"GET">>}) -> 17;
+table_find_static({<<":method">>, <<"HEAD">>}) -> 18;
+table_find_static({<<":method">>, <<"OPTIONS">>}) -> 19;
+table_find_static({<<":method">>, <<"POST">>}) -> 20;
+table_find_static({<<":method">>, <<"PUT">>}) -> 21;
+table_find_static({<<":scheme">>, <<"http">>}) -> 22;
+table_find_static({<<":scheme">>, <<"https">>}) -> 23;
+table_find_static({<<":status">>, <<"103">>}) -> 24;
+table_find_static({<<":status">>, <<"200">>}) -> 25;
+table_find_static({<<":status">>, <<"304">>}) -> 26;
+table_find_static({<<":status">>, <<"404">>}) -> 27;
+table_find_static({<<":status">>, <<"503">>}) -> 28;
+table_find_static({<<"accept">>, <<"*/*">>}) -> 29;
+table_find_static({<<"accept">>, <<"application/dns-message">>}) -> 30;
+table_find_static({<<"accept-encoding">>, <<"gzip, deflate, br">>}) -> 31;
+table_find_static({<<"accept-ranges">>, <<"bytes">>}) -> 32;
+table_find_static({<<"access-control-allow-headers">>, <<"cache-control">>}) -> 33;
+table_find_static({<<"access-control-allow-headers">>, <<"content-type">>}) -> 34;
+table_find_static({<<"access-control-allow-origin">>, <<"*">>}) -> 35;
+table_find_static({<<"cache-control">>, <<"max-age=0">>}) -> 36;
+table_find_static({<<"cache-control">>, <<"max-age=2592000">>}) -> 37;
+table_find_static({<<"cache-control">>, <<"max-age=604800">>}) -> 38;
+table_find_static({<<"cache-control">>, <<"no-cache">>}) -> 39;
+table_find_static({<<"cache-control">>, <<"no-store">>}) -> 40;
+table_find_static({<<"cache-control">>, <<"public, max-age=31536000">>}) -> 41;
+table_find_static({<<"content-encoding">>, <<"br">>}) -> 42;
+table_find_static({<<"content-encoding">>, <<"gzip">>}) -> 43;
+table_find_static({<<"content-type">>, <<"application/dns-message">>}) -> 44;
+table_find_static({<<"content-type">>, <<"application/javascript">>}) -> 45;
+table_find_static({<<"content-type">>, <<"application/json">>}) -> 46;
+table_find_static({<<"content-type">>, <<"application/x-www-form-urlencoded">>}) -> 47;
+table_find_static({<<"content-type">>, <<"image/gif">>}) -> 48;
+table_find_static({<<"content-type">>, <<"image/jpeg">>}) -> 49;
+table_find_static({<<"content-type">>, <<"image/png">>}) -> 50;
+table_find_static({<<"content-type">>, <<"text/css">>}) -> 51;
+table_find_static({<<"content-type">>, <<"text/html; charset=utf-8">>}) -> 52;
+table_find_static({<<"content-type">>, <<"text/plain">>}) -> 53;
+table_find_static({<<"content-type">>, <<"text/plain;charset=utf-8">>}) -> 54;
+table_find_static({<<"range">>, <<"bytes=0-">>}) -> 55;
+table_find_static({<<"strict-transport-security">>, <<"max-age=31536000">>}) -> 56;
+table_find_static({<<"strict-transport-security">>, <<"max-age=31536000; includesubdomains">>}) -> 57;
+table_find_static({<<"strict-transport-security">>, <<"max-age=31536000; includesubdomains; preload">>}) -> 58;
+table_find_static({<<"vary">>, <<"accept-encoding">>}) -> 59;
+table_find_static({<<"vary">>, <<"origin">>}) -> 60;
+table_find_static({<<"x-content-type-options">>, <<"nosniff">>}) -> 61;
+table_find_static({<<"x-xss-protection">>, <<"1; mode=block">>}) -> 62;
+table_find_static({<<":status">>, <<"100">>}) -> 63;
+table_find_static({<<":status">>, <<"204">>}) -> 64;
+table_find_static({<<":status">>, <<"206">>}) -> 65;
+table_find_static({<<":status">>, <<"302">>}) -> 66;
+table_find_static({<<":status">>, <<"400">>}) -> 67;
+table_find_static({<<":status">>, <<"403">>}) -> 68;
+table_find_static({<<":status">>, <<"421">>}) -> 69;
+table_find_static({<<":status">>, <<"425">>}) -> 70;
+table_find_static({<<":status">>, <<"500">>}) -> 71;
+table_find_static({<<"accept-language">>, <<>>}) -> 72;
+%% These two values are technically invalid. An erratum has already
+%% been filed against the RFC. We must however continue to include
+%% them in the table for compatibility.
+table_find_static({<<"access-control-allow-credentials">>, <<"FALSE">>}) -> 73;
+table_find_static({<<"access-control-allow-credentials">>, <<"TRUE">>}) -> 74;
+table_find_static({<<"access-control-allow-headers">>, <<"*">>}) -> 75;
+table_find_static({<<"access-control-allow-methods">>, <<"get">>}) -> 76;
+table_find_static({<<"access-control-allow-methods">>, <<"get, post, options">>}) -> 77;
+table_find_static({<<"access-control-allow-methods">>, <<"options">>}) -> 78;
+table_find_static({<<"access-control-expose-headers">>, <<"content-length">>}) -> 79;
+table_find_static({<<"access-control-request-headers">>, <<"content-type">>}) -> 80;
+table_find_static({<<"access-control-request-method">>, <<"get">>}) -> 81;
+table_find_static({<<"access-control-request-method">>, <<"post">>}) -> 82;
+table_find_static({<<"alt-svc">>, <<"clear">>}) -> 83;
+table_find_static({<<"authorization">>, <<>>}) -> 84;
+table_find_static({<<"content-security-policy">>, <<"script-src 'none'; object-src 'none'; base-uri 'none'">>}) -> 85;
+table_find_static({<<"early-data">>, <<"1">>}) -> 86;
+table_find_static({<<"expect-ct">>, <<>>}) -> 87;
+table_find_static({<<"forwarded">>, <<>>}) -> 88;
+table_find_static({<<"if-range">>, <<>>}) -> 89;
+table_find_static({<<"origin">>, <<>>}) -> 90;
+table_find_static({<<"purpose">>, <<"prefetch">>}) -> 91;
+table_find_static({<<"server">>, <<>>}) -> 92;
+table_find_static({<<"timing-allow-origin">>, <<"*">>}) -> 93;
+table_find_static({<<"upgrade-insecure-requests">>, <<"1">>}) -> 94;
+table_find_static({<<"user-agent">>, <<>>}) -> 95;
+table_find_static({<<"x-forwarded-for">>, <<>>}) -> 96;
+table_find_static({<<"x-frame-options">>, <<"deny">>}) -> 97;
+table_find_static({<<"x-frame-options">>, <<"sameorigin">>}) -> 98;
+table_find_static(_) -> not_found.
+
+table_find_name_static(<<":authority">>) -> 0;
+table_find_name_static(<<":path">>) -> 1;
+table_find_name_static(<<"age">>) -> 2;
+table_find_name_static(<<"content-disposition">>) -> 3;
+table_find_name_static(<<"content-length">>) -> 4;
+table_find_name_static(<<"cookie">>) -> 5;
+table_find_name_static(<<"date">>) -> 6;
+table_find_name_static(<<"etag">>) -> 7;
+table_find_name_static(<<"if-modified-since">>) -> 8;
+table_find_name_static(<<"if-none-match">>) -> 9;
+table_find_name_static(<<"last-modified">>) -> 10;
+table_find_name_static(<<"link">>) -> 11;
+table_find_name_static(<<"location">>) -> 12;
+table_find_name_static(<<"referer">>) -> 13;
+table_find_name_static(<<"set-cookie">>) -> 14;
+table_find_name_static(<<":method">>) -> 15;
+table_find_name_static(<<":scheme">>) -> 22;
+table_find_name_static(<<":status">>) -> 24;
+table_find_name_static(<<"accept">>) -> 29;
+table_find_name_static(<<"accept-encoding">>) -> 31;
+table_find_name_static(<<"accept-ranges">>) -> 32;
+table_find_name_static(<<"access-control-allow-headers">>) -> 33;
+table_find_name_static(<<"access-control-allow-origin">>) -> 35;
+table_find_name_static(<<"cache-control">>) -> 36;
+table_find_name_static(<<"content-encoding">>) -> 42;
+table_find_name_static(<<"content-type">>) -> 44;
+table_find_name_static(<<"range">>) -> 55;
+table_find_name_static(<<"strict-transport-security">>) -> 56;
+table_find_name_static(<<"vary">>) -> 59;
+table_find_name_static(<<"x-content-type-options">>) -> 61;
+table_find_name_static(<<"x-xss-protection">>) -> 62;
+table_find_name_static(<<"accept-language">>) -> 72;
+table_find_name_static(<<"access-control-allow-credentials">>) -> 73;
+table_find_name_static(<<"access-control-allow-methods">>) -> 76;
+table_find_name_static(<<"access-control-expose-headers">>) -> 79;
+table_find_name_static(<<"access-control-request-headers">>) -> 80;
+table_find_name_static(<<"access-control-request-method">>) -> 81;
+table_find_name_static(<<"alt-svc">>) -> 83;
+table_find_name_static(<<"authorization">>) -> 84;
+table_find_name_static(<<"content-security-policy">>) -> 85;
+table_find_name_static(<<"early-data">>) -> 86;
+table_find_name_static(<<"expect-ct">>) -> 87;
+table_find_name_static(<<"forwarded">>) -> 88;
+table_find_name_static(<<"if-range">>) -> 89;
+table_find_name_static(<<"origin">>) -> 90;
+table_find_name_static(<<"purpose">>) -> 91;
+table_find_name_static(<<"server">>) -> 92;
+table_find_name_static(<<"timing-allow-origin">>) -> 93;
+table_find_name_static(<<"upgrade-insecure-requests">>) -> 94;
+table_find_name_static(<<"user-agent">>) -> 95;
+table_find_name_static(<<"x-forwarded-for">>) -> 96;
+table_find_name_static(<<"x-frame-options">>) -> 97;
+table_find_name_static(_) -> not_found.
+
+table_get_static(0) -> {<<":authority">>, <<>>};
+table_get_static(1) -> {<<":path">>, <<"/">>};
+table_get_static(2) -> {<<"age">>, <<"0">>};
+table_get_static(3) -> {<<"content-disposition">>, <<>>};
+table_get_static(4) -> {<<"content-length">>, <<"0">>};
+table_get_static(5) -> {<<"cookie">>, <<>>};
+table_get_static(6) -> {<<"date">>, <<>>};
+table_get_static(7) -> {<<"etag">>, <<>>};
+table_get_static(8) -> {<<"if-modified-since">>, <<>>};
+table_get_static(9) -> {<<"if-none-match">>, <<>>};
+table_get_static(10) -> {<<"last-modified">>, <<>>};
+table_get_static(11) -> {<<"link">>, <<>>};
+table_get_static(12) -> {<<"location">>, <<>>};
+table_get_static(13) -> {<<"referer">>, <<>>};
+table_get_static(14) -> {<<"set-cookie">>, <<>>};
+table_get_static(15) -> {<<":method">>, <<"CONNECT">>};
+table_get_static(16) -> {<<":method">>, <<"DELETE">>};
+table_get_static(17) -> {<<":method">>, <<"GET">>};
+table_get_static(18) -> {<<":method">>, <<"HEAD">>};
+table_get_static(19) -> {<<":method">>, <<"OPTIONS">>};
+table_get_static(20) -> {<<":method">>, <<"POST">>};
+table_get_static(21) -> {<<":method">>, <<"PUT">>};
+table_get_static(22) -> {<<":scheme">>, <<"http">>};
+table_get_static(23) -> {<<":scheme">>, <<"https">>};
+table_get_static(24) -> {<<":status">>, <<"103">>};
+table_get_static(25) -> {<<":status">>, <<"200">>};
+table_get_static(26) -> {<<":status">>, <<"304">>};
+table_get_static(27) -> {<<":status">>, <<"404">>};
+table_get_static(28) -> {<<":status">>, <<"503">>};
+table_get_static(29) -> {<<"accept">>, <<"*/*">>};
+table_get_static(30) -> {<<"accept">>, <<"application/dns-message">>};
+table_get_static(31) -> {<<"accept-encoding">>, <<"gzip, deflate, br">>};
+table_get_static(32) -> {<<"accept-ranges">>, <<"bytes">>};
+table_get_static(33) -> {<<"access-control-allow-headers">>, <<"cache-control">>};
+table_get_static(34) -> {<<"access-control-allow-headers">>, <<"content-type">>};
+table_get_static(35) -> {<<"access-control-allow-origin">>, <<"*">>};
+table_get_static(36) -> {<<"cache-control">>, <<"max-age=0">>};
+table_get_static(37) -> {<<"cache-control">>, <<"max-age=2592000">>};
+table_get_static(38) -> {<<"cache-control">>, <<"max-age=604800">>};
+table_get_static(39) -> {<<"cache-control">>, <<"no-cache">>};
+table_get_static(40) -> {<<"cache-control">>, <<"no-store">>};
+table_get_static(41) -> {<<"cache-control">>, <<"public, max-age=31536000">>};
+table_get_static(42) -> {<<"content-encoding">>, <<"br">>};
+table_get_static(43) -> {<<"content-encoding">>, <<"gzip">>};
+table_get_static(44) -> {<<"content-type">>, <<"application/dns-message">>};
+table_get_static(45) -> {<<"content-type">>, <<"application/javascript">>};
+table_get_static(46) -> {<<"content-type">>, <<"application/json">>};
+table_get_static(47) -> {<<"content-type">>, <<"application/x-www-form-urlencoded">>};
+table_get_static(48) -> {<<"content-type">>, <<"image/gif">>};
+table_get_static(49) -> {<<"content-type">>, <<"image/jpeg">>};
+table_get_static(50) -> {<<"content-type">>, <<"image/png">>};
+table_get_static(51) -> {<<"content-type">>, <<"text/css">>};
+table_get_static(52) -> {<<"content-type">>, <<"text/html; charset=utf-8">>};
+table_get_static(53) -> {<<"content-type">>, <<"text/plain">>};
+table_get_static(54) -> {<<"content-type">>, <<"text/plain;charset=utf-8">>};
+table_get_static(55) -> {<<"range">>, <<"bytes=0-">>};
+table_get_static(56) -> {<<"strict-transport-security">>, <<"max-age=31536000">>};
+table_get_static(57) -> {<<"strict-transport-security">>, <<"max-age=31536000; includesubdomains">>};
+table_get_static(58) -> {<<"strict-transport-security">>, <<"max-age=31536000; includesubdomains; preload">>};
+table_get_static(59) -> {<<"vary">>, <<"accept-encoding">>};
+table_get_static(60) -> {<<"vary">>, <<"origin">>};
+table_get_static(61) -> {<<"x-content-type-options">>, <<"nosniff">>};
+table_get_static(62) -> {<<"x-xss-protection">>, <<"1; mode=block">>};
+table_get_static(63) -> {<<":status">>, <<"100">>};
+table_get_static(64) -> {<<":status">>, <<"204">>};
+table_get_static(65) -> {<<":status">>, <<"206">>};
+table_get_static(66) -> {<<":status">>, <<"302">>};
+table_get_static(67) -> {<<":status">>, <<"400">>};
+table_get_static(68) -> {<<":status">>, <<"403">>};
+table_get_static(69) -> {<<":status">>, <<"421">>};
+table_get_static(70) -> {<<":status">>, <<"425">>};
+table_get_static(71) -> {<<":status">>, <<"500">>};
+table_get_static(72) -> {<<"accept-language">>, <<>>};
+%% These two values are technically invalid. An erratum has already
+%% been filed against the RFC. We must however continue to include
+%% them in the table for compatibility.
+table_get_static(73) -> {<<"access-control-allow-credentials">>, <<"FALSE">>};
+table_get_static(74) -> {<<"access-control-allow-credentials">>, <<"TRUE">>};
+table_get_static(75) -> {<<"access-control-allow-headers">>, <<"*">>};
+table_get_static(76) -> {<<"access-control-allow-methods">>, <<"get">>};
+table_get_static(77) -> {<<"access-control-allow-methods">>, <<"get, post, options">>};
+table_get_static(78) -> {<<"access-control-allow-methods">>, <<"options">>};
+table_get_static(79) -> {<<"access-control-expose-headers">>, <<"content-length">>};
+table_get_static(80) -> {<<"access-control-request-headers">>, <<"content-type">>};
+table_get_static(81) -> {<<"access-control-request-method">>, <<"get">>};
+table_get_static(82) -> {<<"access-control-request-method">>, <<"post">>};
+table_get_static(83) -> {<<"alt-svc">>, <<"clear">>};
+table_get_static(84) -> {<<"authorization">>, <<>>};
+table_get_static(85) -> {<<"content-security-policy">>, <<"script-src 'none'; object-src 'none'; base-uri 'none'">>};
+table_get_static(86) -> {<<"early-data">>, <<"1">>};
+table_get_static(87) -> {<<"expect-ct">>, <<>>};
+table_get_static(88) -> {<<"forwarded">>, <<>>};
+table_get_static(89) -> {<<"if-range">>, <<>>};
+table_get_static(90) -> {<<"origin">>, <<>>};
+table_get_static(91) -> {<<"purpose">>, <<"prefetch">>};
+table_get_static(92) -> {<<"server">>, <<>>};
+table_get_static(93) -> {<<"timing-allow-origin">>, <<"*">>};
+table_get_static(94) -> {<<"upgrade-insecure-requests">>, <<"1">>};
+table_get_static(95) -> {<<"user-agent">>, <<>>};
+table_get_static(96) -> {<<"x-forwarded-for">>, <<>>};
+table_get_static(97) -> {<<"x-frame-options">>, <<"deny">>};
+table_get_static(98) -> {<<"x-frame-options">>, <<"sameorigin">>}.
+
+table_get_name_static(0) -> <<":authority">>;
+table_get_name_static(1) -> <<":path">>;
+table_get_name_static(2) -> <<"age">>;
+table_get_name_static(3) -> <<"content-disposition">>;
+table_get_name_static(4) -> <<"content-length">>;
+table_get_name_static(5) -> <<"cookie">>;
+table_get_name_static(6) -> <<"date">>;
+table_get_name_static(7) -> <<"etag">>;
+table_get_name_static(8) -> <<"if-modified-since">>;
+table_get_name_static(9) -> <<"if-none-match">>;
+table_get_name_static(10) -> <<"last-modified">>;
+table_get_name_static(11) -> <<"link">>;
+table_get_name_static(12) -> <<"location">>;
+table_get_name_static(13) -> <<"referer">>;
+table_get_name_static(14) -> <<"set-cookie">>;
+table_get_name_static(15) -> <<":method">>;
+table_get_name_static(16) -> <<":method">>;
+table_get_name_static(17) -> <<":method">>;
+table_get_name_static(18) -> <<":method">>;
+table_get_name_static(19) -> <<":method">>;
+table_get_name_static(20) -> <<":method">>;
+table_get_name_static(21) -> <<":method">>;
+table_get_name_static(22) -> <<":scheme">>;
+table_get_name_static(23) -> <<":scheme">>;
+table_get_name_static(24) -> <<":status">>;
+table_get_name_static(25) -> <<":status">>;
+table_get_name_static(26) -> <<":status">>;
+table_get_name_static(27) -> <<":status">>;
+table_get_name_static(28) -> <<":status">>;
+table_get_name_static(29) -> <<"accept">>;
+table_get_name_static(30) -> <<"accept">>;
+table_get_name_static(31) -> <<"accept-encoding">>;
+table_get_name_static(32) -> <<"accept-ranges">>;
+table_get_name_static(33) -> <<"access-control-allow-headers">>;
+table_get_name_static(34) -> <<"access-control-allow-headers">>;
+table_get_name_static(35) -> <<"access-control-allow-origin">>;
+table_get_name_static(36) -> <<"cache-control">>;
+table_get_name_static(37) -> <<"cache-control">>;
+table_get_name_static(38) -> <<"cache-control">>;
+table_get_name_static(39) -> <<"cache-control">>;
+table_get_name_static(40) -> <<"cache-control">>;
+table_get_name_static(41) -> <<"cache-control">>;
+table_get_name_static(42) -> <<"content-encoding">>;
+table_get_name_static(43) -> <<"content-encoding">>;
+table_get_name_static(44) -> <<"content-type">>;
+table_get_name_static(45) -> <<"content-type">>;
+table_get_name_static(46) -> <<"content-type">>;
+table_get_name_static(47) -> <<"content-type">>;
+table_get_name_static(48) -> <<"content-type">>;
+table_get_name_static(49) -> <<"content-type">>;
+table_get_name_static(50) -> <<"content-type">>;
+table_get_name_static(51) -> <<"content-type">>;
+table_get_name_static(52) -> <<"content-type">>;
+table_get_name_static(53) -> <<"content-type">>;
+table_get_name_static(54) -> <<"content-type">>;
+table_get_name_static(55) -> <<"range">>;
+table_get_name_static(56) -> <<"strict-transport-security">>;
+table_get_name_static(57) -> <<"strict-transport-security">>;
+table_get_name_static(58) -> <<"strict-transport-security">>;
+table_get_name_static(59) -> <<"vary">>;
+table_get_name_static(60) -> <<"vary">>;
+table_get_name_static(61) -> <<"x-content-type-options">>;
+table_get_name_static(62) -> <<"x-xss-protection">>;
+table_get_name_static(63) -> <<":status">>;
+table_get_name_static(64) -> <<":status">>;
+table_get_name_static(65) -> <<":status">>;
+table_get_name_static(66) -> <<":status">>;
+table_get_name_static(67) -> <<":status">>;
+table_get_name_static(68) -> <<":status">>;
+table_get_name_static(69) -> <<":status">>;
+table_get_name_static(70) -> <<":status">>;
+table_get_name_static(71) -> <<":status">>;
+table_get_name_static(72) -> <<"accept-language">>;
+table_get_name_static(73) -> <<"access-control-allow-credentials">>;
+table_get_name_static(74) -> <<"access-control-allow-credentials">>;
+table_get_name_static(75) -> <<"access-control-allow-headers">>;
+table_get_name_static(76) -> <<"access-control-allow-methods">>;
+table_get_name_static(77) -> <<"access-control-allow-methods">>;
+table_get_name_static(78) -> <<"access-control-allow-methods">>;
+table_get_name_static(79) -> <<"access-control-expose-headers">>;
+table_get_name_static(80) -> <<"access-control-request-headers">>;
+table_get_name_static(81) -> <<"access-control-request-method">>;
+table_get_name_static(82) -> <<"access-control-request-method">>;
+table_get_name_static(83) -> <<"alt-svc">>;
+table_get_name_static(84) -> <<"authorization">>;
+table_get_name_static(85) -> <<"content-security-policy">>;
+table_get_name_static(86) -> <<"early-data">>;
+table_get_name_static(87) -> <<"expect-ct">>;
+table_get_name_static(88) -> <<"forwarded">>;
+table_get_name_static(89) -> <<"if-range">>;
+table_get_name_static(90) -> <<"origin">>;
+table_get_name_static(91) -> <<"purpose">>;
+table_get_name_static(92) -> <<"server">>;
+table_get_name_static(93) -> <<"timing-allow-origin">>;
+table_get_name_static(94) -> <<"upgrade-insecure-requests">>;
+table_get_name_static(95) -> <<"user-agent">>;
+table_get_name_static(96) -> <<"x-forwarded-for">>;
+table_get_name_static(97) -> <<"x-frame-options">>;
+table_get_name_static(98) -> <<"x-frame-options">>.
+
+table_insert(Entry={Name, Value}, State=#state{capacity=Capacity,
+ size=Size0, insert_count=InsertCount, dyn_table=DynamicTable0,
+ draining_size=DrainingSize}) ->
+ EntrySize = byte_size(Name) + byte_size(Value) + 32,
+ if
+ EntrySize + Size0 =< Capacity ->
+ {ok, State#state{size=Size0 + EntrySize, insert_count=InsertCount + 1,
+ dyn_table=[{EntrySize, Entry}|DynamicTable0]}};
+ EntrySize =< Capacity ->
+ {DynamicTable, Size} = table_evict(DynamicTable0,
+ Capacity - EntrySize, 0, []),
+ {ok, State#state{size=Size + EntrySize, insert_count=InsertCount + 1,
+ dyn_table=[{EntrySize, Entry}|DynamicTable],
+ %% We reduce the draining size by how much was gained from evicting.
+ draining_size=DrainingSize - (Size0 - Size)}};
+ true -> % EntrySize > Capacity ->
+ {connection_error, qpack_encoder_stream_error,
+ 'Entry size larger than table capacity. (RFC9204 3.2.2)'}
+ end.
+
+table_evict([], _, Size, Acc) ->
+ {lists:reverse(Acc), Size};
+table_evict([{EntrySize, _}|_], MaxSize, Size, Acc)
+ when Size + EntrySize > MaxSize ->
+ {lists:reverse(Acc), Size};
+table_evict([Entry = {EntrySize, _}|Tail], MaxSize, Size, Acc) ->
+ table_evict(Tail, MaxSize, Size + EntrySize, [Entry|Acc]).
+
+table_find_dyn(Entry, #state{insert_count=InsertCount, dyn_table=DynamicTable}) ->
+ table_find_dyn(Entry, DynamicTable, InsertCount - 1).
+
+table_find_dyn(_, [], _) ->
+ not_found;
+table_find_dyn(Entry, [{_, Entry}|_], Index) ->
+ Index;
+table_find_dyn(Entry, [_|Tail], Index) ->
+ table_find_dyn(Entry, Tail, Index - 1).
+
+table_find_name_dyn(Name, #state{insert_count=InsertCount, dyn_table=DynamicTable}) ->
+ table_find_name_dyn(Name, DynamicTable, InsertCount - 1).
+
+table_find_name_dyn(_, [], _) ->
+ not_found;
+table_find_name_dyn(Name, [{_, {Name, _}}|_], Index) ->
+ Index;
+table_find_name_dyn(Name, [_|Tail], Index) ->
+ table_find_name_dyn(Name, Tail, Index - 1).
+
+%% @todo These functions may error out if the encoder is invalid (2.2.3. Invalid References).
+table_get_dyn_abs(Index, #state{insert_count=InsertCount, dyn_table=DynamicTable}) ->
+ {_, Header} = lists:nth(InsertCount - Index, DynamicTable),
+ Header.
+
+table_get_dyn_rel(Index, #state{dyn_table=DynamicTable}) ->
+ {_, Header} = lists:nth(1 + Index, DynamicTable),
+ Header.
+
+table_get_name_dyn_rel(Index, State) ->
+ {Name, _} = table_get_dyn_rel(Index, State),
+ Name.
+
+table_get_dyn_pre_base(Index, Base, #state{insert_count=InsertCount, dyn_table=DynamicTable}) ->
+ BaseOffset = InsertCount - Base,
+ {_, Header} = lists:nth(1 + Index + BaseOffset, DynamicTable),
+ Header.
+
+table_get_dyn_post_base(Index, Base, State) ->
+ table_get_dyn_abs(Base + Index, State).
+
+table_get_name_dyn_post_base(Index, Base, State) ->
+ {Name, _} = table_get_dyn_abs(Base + Index, State),
+ Name.
+
+-ifdef(TEST).
+do_init() ->
+ #state{
+ settings_received=false,
+ max_table_capacity=1000,
+ capacity=1000
+ }.
+
+do_table_insert(Entry, State0) ->
+ {ok, State} = table_insert(Entry, State0),
+ State.
+
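+%% A minimal sketch of insertion and eviction using the 1000-byte
+%% table from do_init/0: a second entry that does not fit alongside
+%% the first evicts it, and an entry larger than the whole table
+%% capacity is a connection error (RFC 9204 3.2.2).
+table_insert_evict_test() ->
+	%% Force draining_size to 0 in case the record default differs.
+	State0 = (do_init())#state{draining_size=0},
+	Big = binary:copy(<<$a>>, 468), %% 468 + 468 + 32 = 968 bytes.
+	{ok, State1} = table_insert({Big, Big}, State0),
+	#state{size=968, insert_count=1} = State1,
+	%% The next entry does not fit, evicting the first one.
+	{ok, State2} = table_insert({<<"a">>, <<"b">>}, State1),
+	#state{size=34, insert_count=2,
+		dyn_table=[{34, {<<"a">>, <<"b">>}}]} = State2,
+	%% An entry larger than the table capacity cannot be inserted.
+	{connection_error, qpack_encoder_stream_error, _} =
+		table_insert({Big, binary:copy(Big, 2)}, State2),
+	ok.
+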
+table_get_dyn_abs_test() ->
+ State0 = do_init(),
+ State1 = do_table_insert({<<"g">>, <<"h">>},
+ do_table_insert({<<"e">>, <<"f">>},
+ do_table_insert({<<"c">>, <<"d">>},
+ do_table_insert({<<"a">>, <<"b">>},
+ State0)))),
+ {<<"a">>, <<"b">>} = table_get_dyn_abs(0, State1),
+ {<<"c">>, <<"d">>} = table_get_dyn_abs(1, State1),
+ {<<"e">>, <<"f">>} = table_get_dyn_abs(2, State1),
+ {<<"g">>, <<"h">>} = table_get_dyn_abs(3, State1),
+ %% Evict one member from the table.
+ #state{dyn_table=DynamicTable} = State1,
+ State2 = State1#state{dyn_table=lists:reverse(tl(lists:reverse(DynamicTable)))},
+ {<<"c">>, <<"d">>} = table_get_dyn_abs(1, State2),
+ {<<"e">>, <<"f">>} = table_get_dyn_abs(2, State2),
+ {<<"g">>, <<"h">>} = table_get_dyn_abs(3, State2),
+ ok.
+
+table_get_dyn_rel_test() ->
+ State0 = do_init(),
+ State1 = do_table_insert({<<"g">>, <<"h">>},
+ do_table_insert({<<"e">>, <<"f">>},
+ do_table_insert({<<"c">>, <<"d">>},
+ do_table_insert({<<"a">>, <<"b">>},
+ State0)))),
+ {<<"g">>, <<"h">>} = table_get_dyn_rel(0, State1),
+ {<<"e">>, <<"f">>} = table_get_dyn_rel(1, State1),
+ {<<"c">>, <<"d">>} = table_get_dyn_rel(2, State1),
+ {<<"a">>, <<"b">>} = table_get_dyn_rel(3, State1),
+ %% Evict one member from the table.
+ #state{dyn_table=DynamicTable} = State1,
+ State2 = State1#state{dyn_table=lists:reverse(tl(lists:reverse(DynamicTable)))},
+ {<<"g">>, <<"h">>} = table_get_dyn_rel(0, State2),
+ {<<"e">>, <<"f">>} = table_get_dyn_rel(1, State2),
+ {<<"c">>, <<"d">>} = table_get_dyn_rel(2, State2),
+ %% Add a member to the table.
+ State3 = do_table_insert({<<"i">>, <<"j">>}, State2),
+ {<<"i">>, <<"j">>} = table_get_dyn_rel(0, State3),
+ {<<"g">>, <<"h">>} = table_get_dyn_rel(1, State3),
+ {<<"e">>, <<"f">>} = table_get_dyn_rel(2, State3),
+ {<<"c">>, <<"d">>} = table_get_dyn_rel(3, State3),
+ ok.
+
+table_get_dyn_pre_base_test() ->
+ State0 = do_init(),
+ State1 = do_table_insert({<<"g">>, <<"h">>},
+ do_table_insert({<<"e">>, <<"f">>},
+ do_table_insert({<<"c">>, <<"d">>},
+ do_table_insert({<<"a">>, <<"b">>},
+ State0)))),
+ {<<"e">>, <<"f">>} = table_get_dyn_pre_base(0, 3, State1),
+ {<<"c">>, <<"d">>} = table_get_dyn_pre_base(1, 3, State1),
+ {<<"a">>, <<"b">>} = table_get_dyn_pre_base(2, 3, State1),
+ %% Evict one member from the table.
+ #state{dyn_table=DynamicTable} = State1,
+ State2 = State1#state{dyn_table=lists:reverse(tl(lists:reverse(DynamicTable)))},
+ {<<"e">>, <<"f">>} = table_get_dyn_pre_base(0, 3, State2),
+ {<<"c">>, <<"d">>} = table_get_dyn_pre_base(1, 3, State2),
+ %% Add a member to the table.
+ State3 = do_table_insert({<<"i">>, <<"j">>}, State2),
+ {<<"e">>, <<"f">>} = table_get_dyn_pre_base(0, 3, State3),
+ {<<"c">>, <<"d">>} = table_get_dyn_pre_base(1, 3, State3),
+ ok.
+
+table_get_dyn_post_base_test() ->
+ State0 = do_init(),
+ State1 = do_table_insert({<<"g">>, <<"h">>},
+ do_table_insert({<<"e">>, <<"f">>},
+ do_table_insert({<<"c">>, <<"d">>},
+ do_table_insert({<<"a">>, <<"b">>},
+ State0)))),
+ {<<"e">>, <<"f">>} = table_get_dyn_post_base(0, 2, State1),
+ {<<"g">>, <<"h">>} = table_get_dyn_post_base(1, 2, State1),
+ %% Evict one member from the table.
+ #state{dyn_table=DynamicTable} = State1,
+ State2 = State1#state{dyn_table=lists:reverse(tl(lists:reverse(DynamicTable)))},
+ {<<"e">>, <<"f">>} = table_get_dyn_post_base(0, 2, State2),
+ {<<"g">>, <<"h">>} = table_get_dyn_post_base(1, 2, State2),
+ %% Add a member to the table.
+ State3 = do_table_insert({<<"i">>, <<"j">>}, State2),
+ {<<"e">>, <<"f">>} = table_get_dyn_post_base(0, 2, State3),
+ {<<"g">>, <<"h">>} = table_get_dyn_post_base(1, 2, State3),
+ {<<"i">>, <<"j">>} = table_get_dyn_post_base(2, 2, State3),
+ ok.
+-endif.
diff --git a/src/cow_qs.erl b/src/cow_qs.erl
index d812e39..442ecc8 100644
--- a/src/cow_qs.erl
+++ b/src/cow_qs.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2013-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2013-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_spdy.erl b/src/cow_spdy.erl
index 8bda45b..e7b4043 100644
--- a/src/cow_spdy.erl
+++ b/src/cow_spdy.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2013-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2013-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_sse.erl b/src/cow_sse.erl
index 7aa98ce..6e7081f 100644
--- a/src/cow_sse.erl
+++ b/src/cow_sse.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2017-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2017-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
@@ -52,8 +52,9 @@ init() ->
%% @todo Add a function to retrieve the retry value from the state.
--spec parse(binary(), state())
- -> {event, parsed_event(), State} | {more, State}.
+-spec parse(binary(), State)
+ -> {event, parsed_event(), State} | {more, State}
+ when State::state().
parse(Data0, State=#state{state_name=bom, buffer=Buffer}) ->
Data1 = case Buffer of
<<>> -> Data0;
diff --git a/src/cow_uri.erl b/src/cow_uri.erl
index c0d9903..4480d6b 100644
--- a/src/cow_uri.erl
+++ b/src/cow_uri.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2016-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2016-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
diff --git a/src/cow_uri_template.erl b/src/cow_uri_template.erl
index eac784f..ccc355d 100644
--- a/src/cow_uri_template.erl
+++ b/src/cow_uri_template.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2019, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2019-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
@@ -310,6 +310,9 @@ urlencode_unreserved(<<C,R/bits>>, Acc) ->
urlencode_unreserved(<<>>, Acc) ->
Acc.
+urlencode_reserved(<<$%,H,L,R/bits>>, Acc)
+ when ?IS_HEX(H), ?IS_HEX(L) ->
+ urlencode_reserved(R, <<Acc/binary,$%,H,L>>);
urlencode_reserved(<<C,R/bits>>, Acc)
when ?IS_URI_UNRESERVED(C) or ?IS_URI_GEN_DELIMS(C) or ?IS_URI_SUB_DELIMS(C) ->
urlencode_reserved(R, <<Acc/binary,C>>);
@@ -336,6 +339,7 @@ expand_uritemplate_test_() ->
end
])),
fun() ->
+ io:format("expected: ~0p", [Expected]),
case Expected of
false ->
{'EXIT', _} = (catch expand(URITemplate, Vars));
diff --git a/src/cow_ws.erl b/src/cow_ws.erl
index 3bb46c5..27c7c87 100644
--- a/src/cow_ws.erl
+++ b/src/cow_ws.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2015-2018, Loïc Hoguin <[email protected]>
+%% Copyright (c) 2015-2023, Loïc Hoguin <[email protected]>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above