184 files changed, 16054 insertions, 7561 deletions
diff --git a/.gitattributes b/.gitattributes
index 9ad9fa0..d25516d 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1 +1,5 @@
+# Don't include Erlang.mk in diffs.
 erlang.mk -diff
+
+# Don't change line endings in our test data on Windows.
+test/ws_perf_SUITE_data/*.txt -text
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..62e1ea2
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,69 @@
+## Use workflows from ninenines/ci.erlang.mk to test Cowboy.
+
+name: Check Cowboy
+
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+  schedule:
+      ## Every Monday at 2am.
+    - cron: 0 2 * * 1
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  cleanup-master:
+    name: Cleanup master build
+    runs-on: ubuntu-latest
+    if: ${{ github.event_name == 'schedule' }}
+    permissions:
+      actions: write
+    steps:
+
+    - name: Cleanup master build if necessary
+      run: |
+        gh cache delete Linux-X64-Erlang-master -R $REPO || true
+        gh cache delete macOS-ARM64-Erlang-master -R $REPO || true
+      env:
+        GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        REPO: ${{ github.repository }}
+
+  check:
+    name: Cowboy
+    needs: cleanup-master
+    if: ${{ !cancelled() }}
+    uses: ninenines/ci.erlang.mk/.github/workflows/ci.yaml@master
+
+# The examples test suite is nice to run but typically not
+# important. So we run them after we are done with the other
+# test suites. At this point we know that Erlang was built
+# so we can just use the latest version.
+
+  examples:
+    name: Check examples
+    runs-on: 'ubuntu-latest'
+    steps:
+
+    - name: Checkout repository
+      uses: actions/checkout@v4
+
+    - name: Install latest Erlang/OTP
+      uses: erlef/setup-beam@v1
+      with:
+        otp-version: latest
+
+    - name: Run ct-examples
+      run: make ct-examples
+
+    - name: Upload logs
+      uses: actions/upload-artifact@v4
+      if: always()
+      with:
+        name: Common Test logs (examples)
+        path: |
+          logs/
+          !logs/**/log_private
diff --git a/CONTRIBUTING.asciidoc b/CONTRIBUTING.asciidoc
index 7a3bc84..df8469a 100644
--- a/CONTRIBUTING.asciidoc
+++ b/CONTRIBUTING.asciidoc
@@ -18,7 +18,7 @@ For general discussion about this project, please open a ticket.
 Feedback is always welcome and may transform in tasks to improve
 the project, so having the discussion start there is a plus.
 
-Alternatively you may try the #ninenines IRC channel on Freenode,
+Alternatively you may try the https://discord.gg/x468ZsxG[Discord server]
 or, if you need the discussion to stay private, you can send
 an email at [email protected].
diff --git a/LICENSE b/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2011-2017, Loïc Hoguin <[email protected]>
+Copyright (c) 2011-2025, Loïc Hoguin <[email protected]>
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
diff --git a/Makefile b/Makefile
@@ -2,13 +2,14 @@ PROJECT = cowboy
 PROJECT_DESCRIPTION = Small, fast, modern HTTP server.
-PROJECT_VERSION = 2.8.0
+PROJECT_VERSION = 2.13.0
 PROJECT_REGISTERED = cowboy_clock
 
 # Options.
 
-PLT_APPS = public_key ssl
+PLT_APPS = public_key ssl # ct_helper gun common_test inets
 CT_OPTS += -ct_hooks cowboy_ct_hook [] # -boot start_sasl
+#CT_OPTS += +JPperf true +S 1
 
 # Dependencies.
@@ -16,7 +17,12 @@ LOCAL_DEPS = crypto
 
 DEPS = cowlib ranch
 dep_cowlib = git https://github.com/ninenines/cowlib master
-dep_ranch = git https://github.com/ninenines/ranch 1.7.1
+dep_ranch = git https://github.com/ninenines/ranch 1.8.1
+
+ifeq ($(COWBOY_QUICER),1)
+DEPS += quicer
+dep_quicer = git https://github.com/emqx/quic main
+endif
 
 DOC_DEPS = asciideck
 
@@ -29,19 +35,40 @@ dep_gun = git https://github.com/ninenines/gun master
 dep_ci.erlang.mk = git https://github.com/ninenines/ci.erlang.mk master
 
 DEP_EARLY_PLUGINS = ci.erlang.mk
 
-AUTO_CI_OTP ?= OTP-LATEST-22+
-AUTO_CI_HIPE ?= OTP-LATEST
-# AUTO_CI_ERLLVM ?= OTP-LATEST
-AUTO_CI_WINDOWS ?= OTP-LATEST-22+
+AUTO_CI_OTP ?= OTP-LATEST-24+
+AUTO_CI_WINDOWS ?= OTP-LATEST-24+
+
+# Hex configuration.
+
+define HEX_TARBALL_EXTRA_METADATA
+#{
+	licenses => [<<"ISC">>],
+	links => #{
+		<<"User guide">> => <<"https://ninenines.eu/docs/en/cowboy/2.13/guide/">>,
+		<<"Function reference">> => <<"https://ninenines.eu/docs/en/cowboy/2.13/manual/">>,
+		<<"GitHub">> => <<"https://github.com/ninenines/cowboy">>,
+		<<"Sponsor">> => <<"https://github.com/sponsors/essen">>
+	}
+}
+endef
+
+hex_req_ranch = >= 1.8.0 and < 3.0.0
+hex_req_cowlib = >= 2.14.0 and < 3.0.0
 
 # Standard targets.
 
 include erlang.mk
 
-# Don't run the examples test suite by default.
+# Don't run the examples/autobahn test suites by default.
 
 ifndef FULL
-CT_SUITES := $(filter-out examples ws_autobahn,$(CT_SUITES))
+CT_SUITES := $(filter-out examples http_perf ws_autobahn ws_perf,$(CT_SUITES))
+endif
+
+# Don't run HTTP/3 test suites on Windows.
+
+ifeq ($(PLATFORM),msys2)
+CT_SUITES := $(filter-out rfc9114 rfc9204 rfc9220,$(CT_SUITES))
 endif
 
 # Compile options.
@@ -49,13 +76,23 @@ endif
 
 ERLC_OPTS += +warn_missing_spec +warn_untyped_record # +bin_opt_info
 TEST_ERLC_OPTS += +'{parse_transform, eunit_autoexport}'
 
+ifeq ($(COWBOY_QUICER),1)
+ERLC_OPTS += -D COWBOY_QUICER=1
+TEST_ERLC_OPTS += -D COWBOY_QUICER=1
+endif
+
 # Generate rebar.config on build.
 
 app:: rebar.config
 
+# Fix quicer compilation for HTTP/3.
+
+autopatch-quicer::
+	$(verbose) printf "%s\n" "all: ;" > $(DEPS_DIR)/quicer/c_src/Makefile.erlang.mk
+
 # Dialyze the tests.
 
-DIALYZER_OPTS += --src -r test
+#DIALYZER_OPTS += --src -r test
 
 # h2spec setup.
 
@@ -74,19 +111,14 @@ $(H2SPEC):
 	$(verbose) git clone --depth 1 https://github.com/summerwind/h2spec $(dir $(H2SPEC)) || true
 	$(verbose) $(MAKE) -C $(dir $(H2SPEC)) build MAKEFLAGS= || true
 
-# Use erl_make_certs from the tested release during CI
-# and ensure that ct_helper is always recompiled.
-
-ci-setup:: clean deps test-deps
-	$(gen_verbose) cp ~/.kerl/builds/$(CI_OTP_RELEASE)/otp_src_git/lib/ssl/test/erl_make_certs.erl deps/ct_helper/src/ || true
-	$(gen_verbose) $(MAKE) -C $(DEPS_DIR)/ct_helper clean app
-
 # Prepare for the release.
 prepare_tag:
+	$(verbose) $(warning Hex metadata: $(HEX_TARBALL_EXTRA_METADATA))
+	$(verbose) echo
 	$(verbose) echo -n "Most recent tag: "
-	$(verbose) git tag | tail -n1
-	$(verbose) git verify-tag `git tag | tail -n1`
+	$(verbose) git tag --sort taggerdate | tail -n1
+	$(verbose) git verify-tag `git tag --sort taggerdate | tail -n1`
 	$(verbose) echo -n "MAKEFILE: "
 	$(verbose) grep -m1 PROJECT_VERSION Makefile
 	$(verbose) echo -n "APP: "
@@ -94,8 +126,11 @@ prepare_tag:
 	$(verbose) echo -n "GUIDE: "
 	$(verbose) grep -h dep_$(PROJECT)_commit doc/src/guide/*.asciidoc || true
 	$(verbose) echo
+	$(verbose) echo "Links in the README:"
+	$(verbose) grep http.*:// README.asciidoc
+	$(verbose) echo
 	$(verbose) echo "Titles in most recent CHANGELOG:"
-	$(verbose) for f in `ls -r doc/src/guide/migrating_from_*.asciidoc | head -n1`; do \
+	$(verbose) for f in `ls -rv doc/src/guide/migrating_from_*.asciidoc | head -n1`; do \
 		echo $$f:; \
 		grep == $$f; \
 	done
@@ -103,6 +138,7 @@ prepare_tag:
 	$(verbose) echo "Dependencies:"
 	$(verbose) grep ^DEPS Makefile || echo "DEPS ="
 	$(verbose) grep ^dep_ Makefile || true
+	$(verbose) grep ^hex_req_ Makefile || true
 	$(verbose) echo
 	$(verbose) echo "rebar.config:"
 	$(verbose) cat rebar.config || true
diff --git a/README.asciidoc b/README.asciidoc
index 1fa6d3f..5721a7b 100644
--- a/README.asciidoc
+++ b/README.asciidoc
@@ -18,8 +18,8 @@ Cowboy is *clean* and *well tested* Erlang code.
 
 == Online documentation
 
-* https://ninenines.eu/docs/en/cowboy/2.6/guide[User guide]
-* https://ninenines.eu/docs/en/cowboy/2.6/manual[Function reference]
+* https://ninenines.eu/docs/en/cowboy/2.13/guide[User guide]
+* https://ninenines.eu/docs/en/cowboy/2.13/manual[Function reference]
 
 == Offline documentation
 
@@ -32,7 +32,7 @@ Cowboy is *clean* and *well tested* Erlang code.
 
 == Getting help
 
-* Official IRC Channel: #ninenines on irc.freenode.net
+* https://discord.gg/x25nNq2fFE[Discord server]
 * https://github.com/ninenines/cowboy/issues[Issues tracker]
 * https://ninenines.eu/services[Commercial Support]
 * https://github.com/sponsors/essen[Sponsor me!]
diff --git a/doc/src/guide/book.asciidoc b/doc/src/guide/book.asciidoc
index 0b01b22..58eda34 100644
--- a/doc/src/guide/book.asciidoc
+++ b/doc/src/guide/book.asciidoc
@@ -75,6 +75,16 @@ include::performance.asciidoc[Performance]
 
 = Additional information
 
+include::migrating_from_2.12.asciidoc[Migrating from Cowboy 2.12 to 2.13]
+
+include::migrating_from_2.11.asciidoc[Migrating from Cowboy 2.11 to 2.12]
+
+include::migrating_from_2.10.asciidoc[Migrating from Cowboy 2.10 to 2.11]
+
+include::migrating_from_2.9.asciidoc[Migrating from Cowboy 2.9 to 2.10]
+
+include::migrating_from_2.8.asciidoc[Migrating from Cowboy 2.8 to 2.9]
+
 include::migrating_from_2.7.asciidoc[Migrating from Cowboy 2.7 to 2.8]
 
 include::migrating_from_2.6.asciidoc[Migrating from Cowboy 2.6 to 2.7]
diff --git a/doc/src/guide/constraints.asciidoc b/doc/src/guide/constraints.asciidoc
index 6cc1075..4eade8a 100644
--- a/doc/src/guide/constraints.asciidoc
+++ b/doc/src/guide/constraints.asciidoc
@@ -91,6 +91,11 @@ int(forward, Value) ->
 
 The value must be returned even if it is not converted
 by the constraint.
 
+The two other operations are currently experimental. They are
+meant to help implement HATEOAS type services, but proper
+support for HATEOAS is not expected to be available before
+Cowboy 3.0 because of Cowboy's current router's limitations.
+
 The `reverse` operation does the opposite: it takes a
 converted value and changes it back to what the user
 input would have been.
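For reference, a complete custom constraint implementing all three operations could look like the following sketch, modeled on the built-in `int` constraint whose `forward` clause appears above. The `positive` name and the `not_positive` error reason are illustrative, not from the diff:

[source,erlang]
----
%% Hypothetical custom constraint accepting only positive integers.
%% forward converts the user input, reverse undoes the conversion,
%% format_error renders a human-readable error message.
positive(forward, Value) ->
    try binary_to_integer(Value) of
        I when I > 0 -> {ok, I};
        _ -> {error, not_positive}
    catch _:_ ->
        {error, not_positive}
    end;
positive(reverse, Value) when is_integer(Value), Value > 0 ->
    {ok, integer_to_binary(Value)};
positive(reverse, _) ->
    {error, not_positive};
positive(format_error, {not_positive, Value}) ->
    io_lib:format("The value ~p is not a positive integer.", [Value]).
----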
diff --git a/doc/src/guide/getting_started.asciidoc b/doc/src/guide/getting_started.asciidoc
index 7bf97e3..06677ee 100644
--- a/doc/src/guide/getting_started.asciidoc
+++ b/doc/src/guide/getting_started.asciidoc
@@ -62,14 +62,16 @@ handler.
 === Cowboy setup
 
 We will modify the 'Makefile' to tell the build system it needs to
-fetch and compile Cowboy:
+fetch and compile Cowboy, and that we will use releases:
 
 [source,makefile]
 ----
 PROJECT = hello_erlang
 
 DEPS = cowboy
-dep_cowboy_commit = 2.8.0
+dep_cowboy_commit = 2.13.0
+
+REL_DEPS = relx
 
 DEP_PLUGINS = cowboy
 
@@ -80,6 +82,9 @@ The `DEP_PLUGINS` line tells the build system to load the plugins
 Cowboy provides. These include predefined templates that we will
 use soon.
 
+The `REL_DEPS` line tells the build system to fetch and build
+`relx`, the library that will create the release.
+
 If you do `make run` now, Cowboy will be included in the release
 and started automatically. This is not enough however, as Cowboy
 doesn't do anything by default. We still need to tell Cowboy to
diff --git a/doc/src/guide/introduction.asciidoc b/doc/src/guide/introduction.asciidoc
index f81c872..3a03a78 100644
--- a/doc/src/guide/introduction.asciidoc
+++ b/doc/src/guide/introduction.asciidoc
@@ -35,14 +35,14 @@ guarantee that the experience will be safe and smooth.
 You are advised to perform the necessary testing and security
 audits prior to deploying on other platforms.
 
-Cowboy is developed for Erlang/OTP 22.0 and newer.
+Cowboy is developed for Erlang/OTP 24.0 and newer.
 
 === License
 
 Cowboy uses the ISC License.
 
 ----
-Copyright (c) 2011-2019, Loïc Hoguin <[email protected]>
+Copyright (c) 2011-2025, Loïc Hoguin <[email protected]>
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
diff --git a/doc/src/guide/listeners.asciidoc b/doc/src/guide/listeners.asciidoc
index 04169f9..40aff83 100644
--- a/doc/src/guide/listeners.asciidoc
+++ b/doc/src/guide/listeners.asciidoc
@@ -7,7 +7,7 @@ Depending on the connection handshake, one or another protocol
 may be used.
 
 This chapter is specific to Cowboy. Please refer to the
-https://ninenines.eu/docs/en/ranch/1.3/guide/listeners/[Ranch User Guide]
+https://ninenines.eu/docs/en/ranch/1.8/guide/listeners/[Ranch User Guide]
 for more information about listeners.
 
 Cowboy provides two types of listeners: one listening for
diff --git a/doc/src/guide/loop_handlers.asciidoc b/doc/src/guide/loop_handlers.asciidoc
index e574854..fc45d1c 100644
--- a/doc/src/guide/loop_handlers.asciidoc
+++ b/doc/src/guide/loop_handlers.asciidoc
@@ -31,7 +31,10 @@ for plain HTTP handlers.
 The `init/2` function must return a `cowboy_loop` tuple to enable
 loop handler behavior. This tuple may optionally contain
 the atom `hibernate` to make the process enter hibernation
-until a message is received.
+until a message is received. Alternatively, the tuple may
+optionally contain a positive integer to create a `timeout`
+message when the process has not received messages for too
+long.
 
 This snippet enables the loop handler:
 
@@ -49,6 +52,14 @@ init(Req, State) ->
     {cowboy_loop, Req, State, hibernate}.
 ----
 
+This makes the process time out after 1000ms of idle time.
+
+[source,erlang]
+----
+init(Req, State) ->
+    {cowboy_loop, Req, State, 1000}.
+----
+
 === Receive loop
 
 Once initialized, Cowboy will wait for messages to arrive
@@ -123,3 +134,17 @@ messages received. This is done by returning the atom
 `hibernate` as part of the `loop` tuple callbacks normally
 return.
 Just add the atom at the end and Cowboy will hibernate accordingly.
+
+=== Idle timeout
+
+You may activate timeout events by returning a positive integer
+`N` as part of the `loop` tuple callbacks return. The default
+value is `infinity`. The `info` callback will be called with the
+atom `timeout` unless a message is received within `N` milliseconds:
+
+[source,erlang]
+----
+info(timeout, Req, State) ->
+    %% Do something...
+    {ok, Req, State, 1000}.
+----
diff --git a/doc/src/guide/migrating_from_2.10.asciidoc b/doc/src/guide/migrating_from_2.10.asciidoc
new file mode 100644
index 0000000..aaa8fe9
--- /dev/null
+++ b/doc/src/guide/migrating_from_2.10.asciidoc
@@ -0,0 +1,139 @@
+[appendix]
+== Migrating from Cowboy 2.10 to 2.11
+
+Cowboy 2.11 contains a variety of new features and bug
+fixes. Nearly all previously experimental features are
+now marked as stable, including Websocket over HTTP/2.
+Included is a fix for an HTTP/2 protocol CVE.
+
+Cowboy 2.11 requires Erlang/OTP 24.0 or greater.
+
+Cowboy is now using GitHub Actions for CI. The main reason
+for the move is to reduce costs by no longer having to
+self-host CI runners. The downside is that GitHub runners
+are less reliable and timing dependent tests are now more
+likely to fail.
+
+=== Features added
+
+* A new HTTP/2 option `max_cancel_stream_rate` has been added
+  to control the rate of stream cancellation the server will
+  accept. By default Cowboy will accept 500 cancelled streams
+  every 10 seconds.
+
+* A new stream handler `cowboy_decompress_h` has been added.
+  It allows automatically decompressing incoming gzipped
+  request bodies. It includes options to protect against
+  zip bombs.
+
+* Websocket over HTTP/2 is no longer considered experimental.
+  Note that the `enable_connect_protocol` option must be set
+  to `true` in order to use Websocket over HTTP/2 for the
+  time being.
+
+* Automatic mode for reading request bodies has been
+  documented. In automatic mode, Cowboy waits indefinitely
+  for data and sends a `request_body` message when data
+  comes in. It mirrors `{active, once}` socket modes.
+  This is ideal for loop handlers and is also used
+  internally for HTTP/2 Websocket.
+
+* Ranged requests support is no longer considered
+  experimental. It was added in 2.6 to both `cowboy_static`
+  and `cowboy_rest`. Ranged responses can be produced
+  either automatically (for the `bytes` unit) or manually.
+  REST flowcharts have been updated with the new callbacks
+  and steps related to handling ranged requests.
+
+* A new HTTP/1.1 and HTTP/2 option `reset_idle_timeout_on_send`
+  has been added. When enabled, the `idle_timeout` will be
+  reset every time Cowboy sends data to the socket.
+
+* Loop handlers may now return a timeout value in the place
+  of `hibernate`. Timeouts behave the same as in `gen_server`.
+
+* The `generate_etag` callback of REST handlers now accepts
+  `undefined` as a return value to allow conditionally
+  generating etags.
+
+* The `cowboy_compress_h` options `compress_threshold` and
+  `compress_buffering` are no longer considered experimental.
+  They were de facto stable since 2.6 as they already were
+  documented.
+
+* Functions `cowboy:get_env/2,3` have been added.
+
+* Better error messages have been added when trying to send
+  a 204 or 304 response with a body; when attempting to
+  send two responses to a single request; when trying to
+  push a response after the final response; when trying
+  to send a `set-cookie` header without using
+  `cowboy_req:set_resp_cookie/3,4`.
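Taken together, the new 2.11 options above are configured through the protocol options map passed to the listener. A minimal, illustrative sketch only: the listener name, port, handler module and dispatch rules are made up, and the `{500, 10000}` value assumes the rate option takes the same `{Count, Period}` form as Cowboy's other rate limits (it mirrors the stated default of 500 cancelled streams per 10 seconds):

[source,erlang]
----
%% Enable the cowboy_decompress_h stream handler and tune the
%% HTTP/2 stream cancellation rate limit described above.
Dispatch = cowboy_router:compile([{'_', [{"/[...]", example_h, []}]}]),
{ok, _} = cowboy:start_clear(example_http, [{port, 8080}], #{
    env => #{dispatch => Dispatch},
    stream_handlers => [cowboy_decompress_h, cowboy_stream_h],
    %% Accept at most 500 cancelled streams every 10 seconds.
    max_cancel_stream_rate => {500, 10000}
}).
----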
+
+=== Features removed
+
+* Cowboy will no longer include the NPN extension when
+  starting a TLS listener. This extension has long been
+  deprecated and replaced with the ALPN extension. Cowboy
+  will continue using the ALPN extension for protocol
+  negotiation.
+
+=== Bugs fixed
+
+* A fix was made to address the HTTP/2 CVE CVE-2023-44487
+  via the new HTTP/2 option `max_cancel_stream_rate`.
+
+* HTTP/1.1 requests that contain both a content-length and
+  a transfer-encoding header will now be rejected to avoid
+  security risks. Previous behavior was to ignore the
+  content-length header as recommended by the HTTP RFC.
+
+* HTTP/1.1 connections would sometimes use the wrong timeout
+  value to determine whether the connection should be closed.
+  This resulted in connections staying up longer than
+  intended. This should no longer be the case.
+
+* Cowboy now reacts to socket errors immediately for HTTP/1.1
+  and HTTP/2 when possible. Cowboy will notice when connections
+  have been closed properly earlier than before. This also
+  means that the socket option `send_timeout_close` will work
+  as expected.
+
+* Shutting down HTTP/1.1 pipelined requests could lead to
+  the current request being terminated before the response
+  has been sent. This has been addressed.
+
+* When using HTTP/1.1 an invalid Connection header will now
+  be rejected with a 400 status code instead of crashing.
+
+* The documentation now recommends increasing the HTTP/2
+  option `max_frame_size_received`. Cowboy currently uses
+  the protocol default but will increase its default in a
+  future release. Until then users are recommended to set
+  the option to ensure larger requests are accepted and
+  processed with acceptable performance.
+
+* Cowboy could sometimes send HTTP/2 WINDOW_UPDATE frames
+  twice in a row. Now they should be consolidated.
+
+* Cowboy would sometimes send HTTP/2 WINDOW_UPDATE frames
+  for streams that have stopped internally. This should
+  no longer be the case.
+
+* The `cowboy_compress_h` stream handler will no longer
+  attempt to compress responses that have an `etag` header
+  to avoid caching issues.
+
+* The `cowboy_compress_h` will now always add `accept-encoding`
+  to the `vary` header as it indicates that responses may
+  be compressed.
+
+* Cowboy will now remove the `trap_exit` process flag when
+  HTTP/1.1 connections upgrade to Websocket.
+
+* Exit gracefully instead of crashing when the socket gets
+  closed when reading the PROXY header.
+
+* Missing `cowboy_stream` manual pages have been added.
+
+* A number of fixes were made to documentation and examples.
diff --git a/doc/src/guide/migrating_from_2.11.asciidoc b/doc/src/guide/migrating_from_2.11.asciidoc
new file mode 100644
index 0000000..ab74642
--- /dev/null
+++ b/doc/src/guide/migrating_from_2.11.asciidoc
@@ -0,0 +1,15 @@
+[appendix]
+== Migrating from Cowboy 2.11 to 2.12
+
+Cowboy 2.12 contains a small security improvement for
+the HTTP/2 protocol.
+
+Cowboy 2.12 requires Erlang/OTP 24.0 or greater.
+
+=== Features added
+
+* A new HTTP/2 option `max_fragmented_header_block_size` has
+  been added to limit the size of header blocks that are
+  sent over multiple HEADERS and CONTINUATION frames.
+
+* Update Cowlib to 2.13.0.
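The 2.12 limit above is likewise a protocol option. A hedged sketch, where the listener name, certificate paths, dispatch rules and the 64 KiB value are all placeholders chosen only for illustration:

[source,erlang]
----
%% Cap fragmented header blocks (HEADERS + CONTINUATION frames)
%% at 64 KiB on an HTTP/2-capable TLS listener.
{ok, _} = cowboy:start_tls(example_https,
    [{port, 8443}, {certfile, CertFile}, {keyfile, KeyFile}],
    #{
        env => #{dispatch => Dispatch},
        max_fragmented_header_block_size => 65536
    }).
----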
diff --git a/doc/src/guide/migrating_from_2.12.asciidoc b/doc/src/guide/migrating_from_2.12.asciidoc
new file mode 100644
index 0000000..d058db3
--- /dev/null
+++ b/doc/src/guide/migrating_from_2.12.asciidoc
@@ -0,0 +1,115 @@
+[appendix]
+== Migrating from Cowboy 2.12 to 2.13
+
+Cowboy 2.13 focuses on improving the performance of
+Websocket, as well as the HTTP protocols. It also
+contains a variety of new features and bug fixes.
+In addition, Cowboy 2.13 is the first Cowboy version
+that contains the experimental HTTP/3 support.
+
+Cowboy 2.13 requires Erlang/OTP 24.0 or greater.
+
+=== Features added
+
+* The option `dynamic_buffer` has been added. When
+  enabled, Cowboy will dynamically change the
+  `buffer` socket option based on how much data
+  it receives. It will start at 1024 bytes and
+  go up to 131072 bytes by default. This applies
+  to HTTP/1.1, HTTP/2 and Websocket. The performance
+  gains are very important depending on the scenario.
+
+* HTTP/1.1 and HTTP/2 now accept the `hibernate`
+  option. When set the connection process will
+  automatically hibernate to reduce memory usage
+  at a small performance cost.
+
+* The `protocols` and `alpn_default_protocol` protocol
+  options have been added to control exactly which
+  HTTP protocols are allowed over clear and TLS listeners.
+
+* The Websocket `max_frame_size` option can now be
+  set dynamically via the `set_options` command.
+  This allows configuring a smaller max size and
+  increase it after authentication or other checks.
+
+* `cowboy_req:set_resp_headers` now accept lists of
+  headers. This can be used to simplify passing
+  headers coming from client applications such as
+  Gun. Note that the set-cookie header cannot be
+  provided using this function.
+
+* `cowboy_rest` now always sets the allow header.
+
+* Update Ranch to 1.8.1.
+
+* Update Cowlib to 2.14.0.
+
+* When using Hex.pm, version check requirements will
+  now be relaxed. Cowboy will accept any Ranch version
+  from 1.8.0 to 2.2.0 as well as future 2.x versions.
+  Similarly, any Cowlib 2.x version from 2.14.0 will
+  be accepted.
+
+=== Experimental features added
+
+* Experimental support for HTTP/3 has been added,
+  including Websocket over HTTP/3. HTTP/3 support
+  is disabled by default; to enable, the environment
+  variable COWBOY_QUICER must be set at compile-time.
+
+=== Features deprecated
+
+* The `inactivity_timeout` option is now deprecated
+  for all protocols. It is de facto ignored when
+  `hibernate` is enabled.
+
+=== Optimisation-related changes
+
+* The behavior of the `idle_timeout` timer has been
+  changed for HTTP/2 and Websocket. Cowboy used to
+  reset the timer on every data packet received from
+  the socket. Now Cowboy will check periodically
+  whether new data was received in the interval.
+
+* URI and query string hex encoding and decoding has
+  been optimised.
+
+* Websocket UTF-8 validation of text frames has been
+  optimised.
+
+* Websocket unmasking has been optimised.
+
+=== Bugs fixed
+
+* HTTP/1.1 upgrade to HTTP/2 is now disabled over TLS,
+  as HTTP/2 over TLS must be negotiated via ALPN.
+
+* `cowboy_req:filter_cookies` could miss valid cookies.
+  It has been corrected.
+
+* HTTP/1.1 could get to a state where it would stop
+  receiving data from the socket, or buffer the data
+  without processing it, and the connection eventually
+  time out. This has been fixed.
+
+* Websocket did not compress zero-length frames properly.
+  This resulted in decompression errors in the client.
+  This has been corrected.
+
+* Websocket compression will now be disabled when only
+  the server sets `client_max_window_bits`, as otherwise
+  decompression errors will occur.
+
+* Websocket will now apply `max_frame_size` both to
+  compressed frames as well as the uncompressed payload.
+  Cowboy will stop decompressing when the limit is
+  reached.
+
+* Cowboy now properly handles exits of request processes
+  that occurred externally (e.g. via `exit/2`).
+
+* Invalid return values from `content_types_provided`
+  could result in an atom sent to the socket, leading
+  to a cryptic error message. The invalid value will
+  now result in a better error message.
diff --git a/doc/src/guide/migrating_from_2.5.asciidoc b/doc/src/guide/migrating_from_2.5.asciidoc
index b91b617..5196cc7 100644
--- a/doc/src/guide/migrating_from_2.5.asciidoc
+++ b/doc/src/guide/migrating_from_2.5.asciidoc
@@ -64,7 +64,7 @@ experimental.
 * Add automatic handling of range requests to REST handlers
   that return the callback `auto` from `ranges_accepted/2`.
   Cowboy will call the configured `ProvideCallback` and
-  then split the ouput automatically for the ranged response.
+  then split the output automatically for the ranged response.
 
 * Enable range requests support in `cowboy_static`.
diff --git a/doc/src/guide/migrating_from_2.8.asciidoc b/doc/src/guide/migrating_from_2.8.asciidoc
new file mode 100644
index 0000000..a3a0e7c
--- /dev/null
+++ b/doc/src/guide/migrating_from_2.8.asciidoc
@@ -0,0 +1,50 @@
+[appendix]
+== Migrating from Cowboy 2.8 to 2.9
+
+Cowboy 2.9 implements graceful shutdown of connection
+processes for both HTTP/1.1 and HTTP/2 connections.
+
+Cowboy 2.9 is the first release to support the much
+awaited Erlang/OTP 24 out of the box. While users that
+were using Ranch 2.0 already were ready for OTP 24,
+the Ranch version used by Cowboy out of the box was
+not compatible and had to be updated.
+
+Cowboy 2.9 also contains a small number of tweaks
+and bug fixes.
+
+Cowboy 2.9 requires Erlang/OTP 22.0 or greater.
+
+=== Features added
+
+* Cowboy will now gracefully shutdown HTTP/1.1 and HTTP/2
+  connections when the supervisor asks the connection
+  process to exit, or when `sys:terminate/2,3` is used.
+  Two new configuration options were added for HTTP/2
+  to determine the timeouts for the graceful shutdown
+  steps.
+
+* REST handler `AcceptCallback` can now return `{created, URI}`
+  or `{see_other, URI}` to determine what response status code
+  should be sent (typically to differentiate between a new
+  resource and an update). The return value `{true, URI}` is
+  now deprecated.
+
+* Update Ranch to 1.8.0.
+
+* Update Cowlib to 2.11.0.
+
+=== Bugs fixed
+
+* Fix concurrent body streaming getting stuck with HTTP/2.
+  The alarm could get into blocking state indefinitely
+  when two or more request processes were streaming bodies.
+
+* Fix HTTP/2 rate limiting using the wrong default values
+  in some cases.
+
+* Don't produce an error report when the request process
+  exited normally (`normal` or `shutdown` exit reasons).
+
+* Fix `cowboy_tracer_h` to support trace messages without
+  timestamps.
diff --git a/doc/src/guide/migrating_from_2.9.asciidoc b/doc/src/guide/migrating_from_2.9.asciidoc
new file mode 100644
index 0000000..7395e5f
--- /dev/null
+++ b/doc/src/guide/migrating_from_2.9.asciidoc
@@ -0,0 +1,42 @@
+[appendix]
+== Migrating from Cowboy 2.9 to 2.10
+
+Cowboy 2.10 is a maintenance release adding support
+for Erlang/OTP 26. The main change is a Cowlib update
+to fix a compilation error that only occurs starting
+from OTP 26.
+
+Cowboy 2.10 requires Erlang/OTP 22.0 or greater.
+
+=== Features added
+
+* Add support for `Default` value of SameSite
+  cookie attribute.
+
+* Add support for the `stale-*` cache-control directives
+  from RFC 5861.
+
+* Update Cowlib to 2.12.1.
+
+=== Bugs fixed
+
+* Fix a compilation error in Cowlib when using Erlang/OTP 26.
+
+* Fix data sent after RST_STREAM in HTTP/2 in rare cases.
+
+* Fix parsing of RST_STREAM frames to properly handle
+  frames that have a valid length but were not fully
+  received yet.
+
+* Remove the obsolete `Version` cookie attribute.
+
+* Handle more edge cases for cookie parsing based on updates
+  to the RFC 6265bis draft.
+
+* Make Basic auth parsing ignore unknown authentication
+  parameters and generally update the code to conform
+  to RFC 7617.
+
+* Fix URI template reserved expansion of %-encoded.
+
+* Update structured headers implementation to RFC 8941.
diff --git a/doc/src/guide/req.asciidoc b/doc/src/guide/req.asciidoc
index 754e470..6b95228 100644
--- a/doc/src/guide/req.asciidoc
+++ b/doc/src/guide/req.asciidoc
@@ -258,7 +258,8 @@ contain two parameters of name `key`.
 
 The same is true when trying to use the PHP-style suffix `[]`.
 When a query string is `key[]=1&key[]=2`, the list returned will
-contain two parameters of name `key[]`.
+contain two parameters of name `key[]`. Cowboy does not require
+the `[]` suffix to properly handle repeated key names.
 
 When a query string is simply `key`, Cowboy will return the
 list `[{<<"key">>, true}]`, using `true` to indicate that the
@@ -291,9 +292,11 @@ If no default is provided and the value is missing, the query
 string is deemed invalid and the process will crash.
 
 When the query string is `key=1&key=2`, the value for `key`
-will be the list `[1, 2]`. Parameter names do not need to
-include the PHP-style suffix. Constraints may be used to
-ensure that only one value was passed through.
+will be the list `[<<"1">>, <<"2">>]`. Parameter names do not
+need to include the PHP-style suffix. Constraints may be used
+to ensure that only one value was given. Constraints do not
+automatically look inside the list, a custom constraint must
+be written if that is necessary.
 
 === Headers
diff --git a/doc/src/guide/req_body.asciidoc b/doc/src/guide/req_body.asciidoc
index 4906811..88389f6 100644
--- a/doc/src/guide/req_body.asciidoc
+++ b/doc/src/guide/req_body.asciidoc
@@ -74,17 +74,33 @@ only up to 1MB for up to 5 seconds:
     #{length => 1000000, period => 5000}).
 ----
 
-You may also disable the length limit:
+These two options can effectively be used to control
+the rate of transmission of the request body.
+
+It is also possible to asynchronously read the request
+body using auto mode:
 
 [source,erlang]
-{ok, Data, Req} = cowboy_req:read_body(Req0, #{length => infinity}).
+----
+Ref = make_ref(),
+cowboy_req:cast({read_body, self(), Ref, auto, infinity}, Req).
+----
 
-This makes the function wait 15 seconds and return with
-whatever arrived during that period. This is not
-recommended for public facing applications.
+Cowboy will wait indefinitely for data and then send a
+`request_body` message as soon as it has data available,
+regardless of length.
 
-These two options can effectively be used to control
-the rate of transmission of the request body.
+[source,erlang]
+----
+receive
+    {request_body, Ref, nofin, Data} ->
+        do_something(Data);
+    {request_body, Ref, fin, _BodyLen, Data} ->
+        do_something(Data)
+end.
+----
+
+Asynchronous reading of data pairs well with loop handlers.
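The auto mode shown above is the piece the migration notes describe as pairing well with loop handlers. A minimal sketch of that pairing, using only calls and message shapes from the documentation above; the module name, state shape and the final reply are made up for the example:

[source,erlang]
----
-module(example_body_h).
-export([init/2, info/3]).

%% Ask Cowboy to push body data to this process, then loop.
init(Req, _State) ->
    Ref = make_ref(),
    cowboy_req:cast({read_body, self(), Ref, auto, infinity}, Req),
    {cowboy_loop, Req, {Ref, <<>>}}.

%% Accumulate chunks until the fin message, then reply.
info({request_body, Ref, nofin, Data}, Req, {Ref, Acc}) ->
    {ok, Req, {Ref, <<Acc/binary, Data/binary>>}};
info({request_body, Ref, fin, _BodyLen, Data}, Req, {Ref, Acc}) ->
    Req2 = cowboy_req:reply(200, #{}, <<Acc/binary, Data/binary>>, Req),
    {stop, Req2, {Ref, <<>>}}.
----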
=== Streaming the body diff --git a/doc/src/guide/resource_design.asciidoc b/doc/src/guide/resource_design.asciidoc index 954d87d..125b437 100644 --- a/doc/src/guide/resource_design.asciidoc +++ b/doc/src/guide/resource_design.asciidoc @@ -144,6 +144,16 @@ never be called. Implement the `languages_provided` or `charsets_provided` callbacks if applicable. +Does the resource accept ranged requests? If it does, +implement the `ranges_provided` callback. Resources that +only accept `bytes` units can use the callback name +`auto` and let Cowboy automatically do ranged responses. +Other callbacks should have a name prefix of `ranged_` +for clarity. For example, `ranged_bytes` or `ranged_pages`. +If the resource needs to perform additional checks before +accepting to do a ranged responses, implement the +`range_satisfiable` callback. + Is there any other header that may make the representation of the resource vary? Implement the `variances` callback. @@ -191,10 +201,15 @@ the `options` method. === GET and HEAD methods If you implement the methods GET and/or HEAD, you must -implement one `ProvideResource` callback for each +implement one `ProvideCallback` callback for each content-type returned by the `content_types_provided` callback. +When range requests are accepted, you must implement one +`RangeCallback` for each range unit returned by +`ranges_provided` (unless `auto` was used). This is +in addition to the `ProvideCallback` callback. + === PUT, POST and PATCH methods If you implement the methods PUT, POST and/or PATCH, diff --git a/doc/src/guide/rest_conneg.png b/doc/src/guide/rest_conneg.png Binary files differindex 65ecdcf..79aa69b 100644 --- a/doc/src/guide/rest_conneg.png +++ b/doc/src/guide/rest_conneg.png diff --git a/doc/src/guide/rest_conneg.svg b/doc/src/guide/rest_conneg.svg index 247567a..97bba6a 100644 --- a/doc/src/guide/rest_conneg.svg +++ b/doc/src/guide/rest_conneg.svg @@ -2,24 +2,23 @@ <!-- Created with Inkscape (http://www.inkscape.org/) --> <svg - xmlns:osb="http://www.openswatchbook.org/uri/2009/osb" - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - xmlns:xlink="http://www.w3.org/1999/xlink" - xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" - xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="744.09448819" height="1052.3622047" id="svg2" version="1.1" - inkscape:version="0.48.4 r9939" + inkscape:version="1.3.2 (091e20ef0f, 2023-11-25, custom)" sodipodi:docname="rest_conneg.svg" inkscape:export-filename="/home/essen/Dropbox/Public/drawing.png" inkscape:export-xdpi="90" - inkscape:export-ydpi="90"> + inkscape:export-ydpi="90" + xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" + xmlns:xlink="http://www.w3.org/1999/xlink" + xmlns="http://www.w3.org/2000/svg" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:cc="http://creativecommons.org/ns#" + xmlns:dc="http://purl.org/dc/elements/1.1/"> <defs id="defs4"> <linearGradient @@ -50,7 +49,7 @@ </linearGradient> <linearGradient id="linearGradient5233" - osb:paint="solid"> + inkscape:swatch="solid"> <stop style="stop-color:#69d2e7;stop-opacity:1;" offset="0" @@ -64,26 +63,34 @@ borderopacity="1.0" inkscape:pageopacity="1" inkscape:pageshadow="2" - inkscape:zoom="1.4142136" - 
inkscape:cx="222.80947" - inkscape:cy="634.56615" + inkscape:zoom="0.65304847" + inkscape:cx="259.55194" + inkscape:cy="483.11881" inkscape:document-units="px" inkscape:current-layer="layer1" showgrid="false" - inkscape:window-width="2560" - inkscape:window-height="1402" + inkscape:window-width="1440" + inkscape:window-height="900" inkscape:window-x="0" - inkscape:window-y="38" + inkscape:window-y="0" inkscape:window-maximized="1" inkscape:snap-global="true" - showguides="true"> + showguides="true" + inkscape:showpageshadow="2" + inkscape:pagecheckerboard="0" + inkscape:deskcolor="#d1d1d1"> <inkscape:grid type="xygrid" id="grid5357" empspacing="5" - visible="true" + visible="false" enabled="true" - snapvisiblegridlinesonly="true" /> + snapvisiblegridlinesonly="true" + originx="0" + originy="0" + spacingy="1" + spacingx="1" + units="px" /> </sodipodi:namedview> <metadata id="metadata7"> @@ -93,7 +100,7 @@ <dc:format>image/svg+xml</dc:format> <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> + <dc:title /> </cc:Work> </rdf:RDF> </metadata> @@ -112,7 +119,7 @@ <g transform="translate(303.92143,-296.03137)" id="g5650-7" - inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-filename="rest_conneg.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"> <path @@ -557,149 +564,149 @@ inkscape:export-ydpi="89.926643" /> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" x="-58.692513" y="114.39204" - id="text5371" - sodipodi:linespacing="125%"><tspan + id="text5371"><tspan sodipodi:role="line" id="tspan5373" x="-58.692513" - y="114.39204">some text</tspan></text> + y="114.39204" + style="font-size:16px;line-height:1.25;font-family:sans-serif">some text</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" x="-58.692513" y="53.112247" - id="text5371-2" - sodipodi:linespacing="125%"><tspan + id="text5371-2"><tspan sodipodi:role="line" id="tspan5373-6" x="-58.692513" - y="53.112247">some text</tspan></text> + y="53.112247" + style="font-size:16px;line-height:1.25;font-family:sans-serif">some text</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" 
x="246.18575" y="310.19913" id="text5371-2-3" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-7" x="246.18575" - y="310.19913">has accept-language?</tspan></text> + y="310.19913" + style="font-size:16px;line-height:1.25;font-family:sans-serif">has accept-language?</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" x="245.60762" y="477.47531" id="text5371-2-3-0" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-7-3" x="245.60762" - y="477.47531">has accept-charset?</tspan></text> + y="477.47531" + style="font-size:16px;line-height:1.25;font-family:sans-serif">has accept-charset?</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" x="-58.692513" y="236.95154" - id="text5371-4" - sodipodi:linespacing="125%"><tspan + id="text5371-4"><tspan sodipodi:role="line" id="tspan5373-9" x="-58.692513" - y="236.95154">some text</tspan></text> + y="236.95154" + style="font-size:16px;line-height:1.25;font-family:sans-serif">some text</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" x="245.00391" y="60.912468" id="text5371-4-0" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" x="245.00391" y="60.912468" - id="tspan17171">start</tspan></text> + id="tspan17171" + style="font-size:16px;line-height:1.25;font-family:sans-serif">start</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + 
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" x="246.11153" y="561.14258" id="text5371-2-9" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-8" x="246.11153" - y="561.14258">charsets_provided</tspan></text> + y="561.14258" + style="font-size:16px;line-height:1.25;font-family:sans-serif">charsets_provided</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" x="246.64278" y="646.58331" id="text5371-2-7" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-2" x="246.64278" - y="646.58331">variances</tspan></text> + y="646.58331" + style="font-size:16px;line-height:1.25;font-family:sans-serif">ranges_provided</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" x="246.13106" y="142.80627" id="text5371-2-95" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-80" x="246.13106" - y="142.80627">has accept?</tspan></text> + y="142.80627" + style="font-size:16px;line-height:1.25;font-family:sans-serif">has accept?</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" x="245.80684" y="226.4736" id="text5371-2-32" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-27" x="245.80684" - y="226.4736">content_types_provided</tspan></text> + y="226.4736" + style="font-size:16px;line-height:1.25;font-family:sans-serif">content_types_provided</tspan></text> <text xml:space="preserve" - 
style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" x="246.05293" y="393.80801" id="text5371-2-74" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-18" x="246.05293" - y="393.80801">languages_provided</tspan></text> + y="393.80801" + style="font-size:16px;line-height:1.25;font-family:sans-serif">languages_provided</tspan></text> <rect style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" id="rect5273-1-2" @@ -710,88 +717,88 @@ rx="15" /> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" x="262.26562" y="185.95248" id="text5371-2-391" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-63" x="262.26562" - y="185.95248">true</tspan></text> + y="185.95248" + style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" x="262.26562" y="269.61978" id="text5371-2-954" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-808" x="262.26562" - y="269.61978">provided*</tspan></text> + y="269.61978" + style="font-size:16px;line-height:1.25;font-family:sans-serif">provided*</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" x="262.26562" 
y="353.28702" id="text5371-2-4" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-11" x="262.26562" - y="353.28702">true</tspan></text> + y="353.28702" + style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" x="262.26562" y="436.95425" id="text5371-2-92" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-4" x="262.26562" - y="436.95425">provided*</tspan></text> + y="436.95425" + style="font-size:16px;line-height:1.25;font-family:sans-serif">provided*</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" x="262.26562" y="520.62152" id="text5371-2-739" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-0" x="262.26562" - y="520.62152">true</tspan></text> + y="520.62152" + style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" x="262.26562" y="604.28876" id="text5371-2-8" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-3" x="262.26562" - y="604.28876">provided*</tspan></text> + y="604.28876" + style="font-size:16px;line-height:1.25;font-family:sans-serif">provided*</tspan></text> <g transform="matrix(0,-1,1,0,-513.31414,353.05561)" id="g5650-2"> @@ -820,75 +827,75 @@ </g> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + 
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" x="76.761719" y="227.88033" id="text5371-4-6" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-0" x="76.761719" - y="227.88033">false</tspan></text> + y="227.88033" + style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" x="76.761719" y="395.20209" id="text5371-4-2" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-01" x="76.761719" - y="395.20209">false</tspan></text> + y="395.20209" + style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" x="413.26172" y="374.19577" id="text5371-4-3" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-62" x="413.26172" - y="374.19577">not provided*</tspan></text> + y="374.19577" + style="font-size:16px;line-height:1.25;font-family:sans-serif">not provided*</tspan></text> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" x="76.761719" y="562.52386" id="text5371-4-4" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-2" x="76.761719" - y="562.52386">false</tspan></text> + y="562.52386" + style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <text xml:space="preserve" - 
style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" x="-8.8034744" y="663.24762" id="text5371-4-5" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643" - transform="matrix(0.7410941,-0.67140117,0.67140117,0.7410941,0,0)"><tspan + transform="rotate(-42.1753)"><tspan sodipodi:role="line" id="tspan5373-9-09" x="-8.8034744" - y="663.24762">not provided*</tspan></text> + y="663.24762" + style="font-size:16px;line-height:1.25;font-family:sans-serif">not provided*</tspan></text> <rect style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.73499846;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" id="rect5273-7-3-1" @@ -931,18 +938,18 @@ </g> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" x="599.20062" y="394.09869" id="text5371-43" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-3" x="599.20062" - y="394.09869">406 not acceptable</tspan></text> + y="394.09869" + style="font-size:16px;line-height:1.25;font-family:sans-serif">406 not acceptable</tspan></text> <rect style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:2.44279909;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" id="rect5367" @@ -956,19 +963,19 @@ inkscape:export-ydpi="89.926643" /> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" x="-544.69421" y="-354.17184" id="text5371-2-3-0-7" - sodipodi:linespacing="125%" - transform="matrix(0,-1,1,0,0,0)" + transform="rotate(-90)" inkscape:export-filename="/home/essen/extend/cowboy/guide/http_req_resp.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-7-3-9" x="-544.69421" - y="-354.17184">middlewares</tspan></text> + y="-354.17184" + 
style="font-size:16px;line-height:1.25;font-family:sans-serif">middlewares</tspan></text> <g transform="matrix(0,-1,1,0,-508.93096,565.23553)" id="g5650-2-0-4" @@ -1093,19 +1100,48 @@ </g> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" x="509.41452" y="-106.16136" id="text5371-4-5-9" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643" - transform="matrix(0.69480867,0.71919462,-0.71919462,0.69480867,0,0)"><tspan + transform="rotate(45.988027)"><tspan sodipodi:role="line" id="tspan5373-9-09-1" x="509.41452" - y="-106.16136">not provided*</tspan></text> + y="-106.16136" + style="font-size:16px;line-height:1.25;font-family:sans-serif">not provided*</tspan></text> + <g + transform="translate(303.92156,372.14538)" + id="g5650-6-2-7" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-78-4-5" + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-9-2-3" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> <rect style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" id="rect5273-1-41-0" @@ -1119,17 +1155,42 @@ inkscape:export-ydpi="89.926643" /> <text xml:space="preserve" - style="font-size:16px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" x="246.15048" - y="725.27777" + y="730.10156" id="text5371-2-7-9" - sodipodi:linespacing="125%" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-2-1" x="246.15048" - 
y="725.27777">...</tspan></text> + y="730.10156" + style="font-size:16px;line-height:1.25;font-family:sans-serif">variances</tspan></text> + <rect + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-1-41-0-2" + width="210.17955" + height="35.209244" + x="141.049" + y="789.44257" + rx="15" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="246.15047" + y="808.03937" + id="text5371-2-7-9-9" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-2-1-1" + x="246.15047" + y="808.03937" + style="font-size:16px;line-height:1.25;font-family:sans-serif">...</tspan></text> </g> </svg> diff --git a/doc/src/guide/rest_flowcharts.asciidoc b/doc/src/guide/rest_flowcharts.asciidoc index 308a919..b8d0e0d 100644 --- a/doc/src/guide/rest_flowcharts.asciidoc +++ b/doc/src/guide/rest_flowcharts.asciidoc @@ -95,6 +95,11 @@ callback will only be called at the end of the "GET and HEAD methods" diagram, when all conditions have been met. +Optionally, the `ranges_provided` also returns the +name of a callback for every range unit it accepts. This +will be called at the end of the "GET and HEAD methods" +diagram in the case of ranged requests. + The selected content-type, language and charset are saved as meta values in the Req object. You *should* use the appropriate representation if you set a @@ -121,11 +126,18 @@ succeed, the resource can be retrieved. Cowboy prepares the response by first retrieving metadata about the representation, then by calling -the `ProvideResource` callback. This is the callback +the `ProvideCallback` callback. This is the callback you defined for each content-types you returned from `content_types_provided`. This callback returns the body -that will be sent back to the client, or a fun if the -body must be streamed. +that will be sent back to the client. + +For ranged requests, but only when the `ranges_provided` +callback was defined earlier, Cowboy will add the selected +`range` information to the Req object and call the +`range_satisfiable` callback. After confirming that the +range can be provided, Cowboy calls the `RangeResource` +callback and produces a ranged response using the +ranged data from the callback. 
When the resource does not exist, Cowboy will figure out whether the resource existed previously, and if so whether diff --git a/doc/src/guide/rest_get_head.png b/doc/src/guide/rest_get_head.png Binary files differindex 211ab60..24f8de4 100644 --- a/doc/src/guide/rest_get_head.png +++ b/doc/src/guide/rest_get_head.png diff --git a/doc/src/guide/rest_get_head.svg b/doc/src/guide/rest_get_head.svg index 92030cf..cf66089 100644 --- a/doc/src/guide/rest_get_head.svg +++ b/doc/src/guide/rest_get_head.svg @@ -2,24 +2,23 @@ <!-- Created with Inkscape (http://www.inkscape.org/) --> <svg - xmlns:osb="http://www.openswatchbook.org/uri/2009/osb" - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - xmlns:xlink="http://www.w3.org/1999/xlink" - xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" - xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="744.09448819" height="1052.3622047" id="svg2" version="1.1" - inkscape:version="0.92.1 r" + inkscape:version="1.3.2 (091e20ef0f, 2023-11-25, custom)" sodipodi:docname="rest_get_head.svg" inkscape:export-filename="/home/essen/Dropbox/Public/drawing.png" inkscape:export-xdpi="90" - inkscape:export-ydpi="90"> + inkscape:export-ydpi="90" + xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" + xmlns:xlink="http://www.w3.org/1999/xlink" + xmlns="http://www.w3.org/2000/svg" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:cc="http://creativecommons.org/ns#" + xmlns:dc="http://purl.org/dc/elements/1.1/"> <defs id="defs4"> <linearGradient @@ -50,7 +49,7 @@ </linearGradient> <linearGradient id="linearGradient5233" - osb:paint="solid"> + inkscape:swatch="solid"> <stop style="stop-color:#69d2e7;stop-opacity:1;" offset="0" @@ -64,26 +63,34 @@ borderopacity="1.0" inkscape:pageopacity="1" inkscape:pageshadow="2" - inkscape:zoom="1.4142136" - inkscape:cx="353.51266" - inkscape:cy="522.73683" + inkscape:zoom="0.66612663" + inkscape:cx="444.35996" + inkscape:cy="784.3854" inkscape:document-units="px" inkscape:current-layer="layer1" showgrid="false" - inkscape:window-width="1920" - inkscape:window-height="1043" + inkscape:window-width="1440" + inkscape:window-height="900" inkscape:window-x="0" inkscape:window-y="0" inkscape:window-maximized="1" inkscape:snap-global="true" - showguides="true"> + showguides="true" + inkscape:showpageshadow="2" + inkscape:pagecheckerboard="0" + inkscape:deskcolor="#d1d1d1"> <inkscape:grid type="xygrid" id="grid5357" empspacing="5" - visible="true" + visible="false" enabled="true" - snapvisiblegridlinesonly="true" /> + snapvisiblegridlinesonly="true" + originx="0" + originy="0" + spacingy="1" + spacingx="1" + units="px" /> </sodipodi:namedview> <metadata id="metadata7"> @@ -101,6 +108,223 @@ inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1"> + <g + transform="rotate(57.423293,-360.21231,998.44074)" + id="g5650-2-6-7" + inkscape:export-filename="rest_get_head.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-2-10-6" + d="M -57.78256,275.13761 V 404.27753" + style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" 
/> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-12-6-7" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <g + transform="rotate(90,13.664042,694.60225)" + id="g5650-2-2-17-9"> + <path + inkscape:connector-curvature="0" + id="path5570-2-1-0-4" + d="m -57.78256,343.20394 v 61.59661" + style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-12-5-45-9" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <g + transform="rotate(142.84301,-18.689938,738.63976)" + id="g5650-2-2-17-9-1"> + <path + inkscape:connector-curvature="0" + id="path5570-2-1-0-4-2" + d="m -57.78256,343.20394 v 61.59661" + style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-12-5-45-9-9" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <g + transform="rotate(57.423293,-61.041025,828.63629)" + id="g5650-2-6"> + <path + inkscape:connector-curvature="0" + id="path5570-2-10" + d="M -57.78256,275.13761 V 404.27753" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + 
sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-12-6" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <g + transform="matrix(0.53842825,-0.84267136,-0.84267136,-0.53842825,668.22258,813.26466)" + id="g5650-2-6-0"> + <path + inkscape:connector-curvature="0" + id="path5570-2-10-9" + d="M -57.78256,275.13761 V 404.27753" + style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-12-6-1" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <g + transform="matrix(0.7948003,-0.60687106,-0.60687106,-0.7948003,570.69023,937.57491)" + id="g5650-2-6-0-1"> + <path + inkscape:connector-curvature="0" + id="path5570-2-10-9-1" + d="m -56.935096,252.07781 -0.800167,152.0263" + style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2.35779;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-12-6-1-5" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <g + transform="translate(656.82134,696.68054)" + id="g5650-93-2-6-9-5-7-3-5-9" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-3-7-2-2-9-3-1-4-3" + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + 
id="path5576-4-0-6-0-2-6-9-7-7" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <g + transform="translate(400.94834,696.68054)" + id="g5650-93-2-6-9-5-7-3-5" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-3-7-2-2-9-3-1-4" + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-4-0-6-0-2-6-9-7" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> <path inkscape:export-ydpi="89.926643" inkscape:export-xdpi="89.926643" @@ -110,7 +334,7 @@ d="m -360.31658,371.70113 203.00246,0.045" style="fill:none;stroke:#6d8e41;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:1.99999999, 3.99999998;stroke-dashoffset:0" /> <g - transform="translate(416.63925,-305.0045)" + transform="translate(674.63925,-305.0045)" id="g5650-7" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -118,8 +342,8 @@ <path inkscape:connector-curvature="0" id="path5570-9" - d="m -57.78256,351.41962 0,52.3259" - style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -135,11 +359,11 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-0" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <g - transform="translate(270.88515,-59.451492)" + transform="translate(528.88515,-59.451492)" id="g5650-0" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -147,8 +371,8 @@ <path inkscape:connector-curvature="0" id="path5570-5" - d="m -57.78256,351.41962 0,52.3259" - 
style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -164,11 +388,11 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-1" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <g - transform="translate(270.88515,22.975441)" + transform="translate(528.88515,22.975441)" id="g5650-94" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -176,8 +400,8 @@ <path inkscape:connector-curvature="0" id="path5570-71" - d="m -57.78256,351.41962 0,52.3259" - style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -193,11 +417,11 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-5" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <g - transform="translate(270.88515,105.29639)" + transform="translate(528.88515,105.29639)" id="g5650-93" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -205,8 +429,8 @@ <path inkscape:connector-curvature="0" id="path5570-3" - d="m -57.78256,351.41962 0,52.3259" - style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -222,11 +446,11 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-4" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + 
style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <g - transform="translate(270.88515,187.81342)" + transform="translate(240.08356,270.33438)" id="g5650-3" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -234,8 +458,8 @@ <path inkscape:connector-curvature="0" id="path5570-93" - d="m -57.78256,351.41962 0,52.3259" - style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -251,11 +475,11 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-04" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <g - transform="translate(270.88515,270.7128)" + transform="translate(240.08356,353.23338)" id="g5650-6" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -263,8 +487,8 @@ <path inkscape:connector-curvature="0" id="path5570-78" - d="m -57.78256,351.41962 0,52.3259" - style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -280,11 +504,11 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-9" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <g - transform="translate(270.88515,-141.93971)" + transform="translate(528.88515,-141.93971)" id="g5650-0-8" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -292,8 +516,8 @@ <path inkscape:connector-curvature="0" id="path5570-5-3" - d="m -57.78256,351.41962 0,52.3259" - style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,351.41962 v 52.3259" + 
style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -309,65 +533,65 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-1-1" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <rect - style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1-28" width="210.17955" height="35.209244" - x="108.01281" + x="366.01282" y="275.4668" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643" /> <rect - style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1-5" width="210.17955" height="35.209244" - x="108.01281" + x="366.01282" y="357.98779" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643" /> <rect - style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1-8" width="210.17955" height="35.209244" - x="108.01281" + x="366.01282" y="440.50873" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643" /> <rect - style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1-4" width="210.17955" height="35.209244" - x="108.01281" - y="523.02966" + x="81.249367" + y="605.55103" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643" /> 
<g - transform="matrix(0,-1,1,0,-314.06239,730.23773)" + transform="rotate(-90,235.94692,576.81149)" id="g5650-2-0-4-8" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"> <path - style="fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" + style="fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" d="m 146.3711,152.21525 c 0,0 -65.760927,60.81119 -8.48528,132.93608" id="path20172-9-5" inkscape:connector-curvature="0" - transform="matrix(0,1,-1,0,248.85545,276.69312)" /> + transform="rotate(90,-13.918835,262.77429)" /> <path transform="matrix(0.38224114,0.44664484,-0.44664484,0.38224114,233.48523,355.54168)" inkscape:transform-center-y="-1.0388082" @@ -383,29 +607,29 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-12-1-7-2" - style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" inkscape:transform-center-x="4.5299474" /> </g> <rect - style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1-87" width="210.17955" height="35.209244" - x="108.01281" - y="605.5506" + x="81.249367" + y="688.07202" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643" /> <g - transform="matrix(0.70710678,-0.70710678,0.70710678,0.70710678,261.76354,-143.71473)" + transform="rotate(-45,86.402746,-699.267)" id="g5650-2-2-8"> <path inkscape:connector-curvature="0" id="path5570-2-1-4" - d="m -57.78256,343.20394 0,61.59661" - style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,343.20394 v 61.59661" + style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -421,15 +645,15 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-12-5-3" - style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <rect - 
style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1" width="210.17955" height="35.209244" - x="253.76691" + x="511.76691" y="112.69559" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -506,7 +730,7 @@ y="0" xlink:href="#rect5273-22" id="use5355" - transform="translate(530.58261,-183.7816)" + transform="translate(788.58261,-183.7816)" width="744.09448" height="1052.3622" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -537,7 +761,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="213.07524" + x="471.07526" y="379.78381" id="text5371-2-3" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -545,22 +769,47 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-7" - x="213.07524" + x="471.07526" y="379.78381" style="font-size:16px;line-height:1.25;font-family:sans-serif">last_modified</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="212.74321" - y="546.59912" + x="185.63515" + y="629.15564" id="text5371-2-3-0" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-7-3" - x="212.74321" - y="546.59912" + x="185.63515" + y="629.15564" + style="font-size:16px;line-height:1.25;font-family:sans-serif">ProvideCallback</tspan></text> + <rect + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-1-4-4" + width="210.17955" + height="35.209244" + x="238.076" + y="1018.396" + rx="15" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="342.46179" + y="1042.0006" + id="text5371-2-3-0-5" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-7-3-0" + x="342.46179" + y="1042.0006" style="font-size:16px;line-height:1.25;font-family:sans-serif">ProvideCallback</tspan></text> <text xml:space="preserve" @@ -576,35 +825,35 @@ <text xml:space="preserve" 
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" - x="359.14185" - y="50.482433" + x="617.14185" + y="50.114025" id="text5371-4-0" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" - x="359.14185" - y="50.482433" + x="617.14185" + y="50.114025" id="tspan17171" style="font-size:16px;line-height:1.25;font-family:sans-serif">conneg</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="212.77055" - y="627.34662" + x="186.01915" + y="709.83667" id="text5371-2-9" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-8" - x="212.77055" - y="627.34662" + x="186.01915" + y="709.83667" style="font-size:16px;line-height:1.25;font-family:sans-serif">multiple_choices</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="358.52466" + x="616.52466" y="134.49161" id="text5371-2-95" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -612,13 +861,13 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-80" - x="358.52466" + x="616.52466" y="134.49161" style="font-size:16px;line-height:1.25;font-family:sans-serif">resource_exists</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="213.38774" + x="471.38776" y="296.80188" id="text5371-2-32" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -626,13 +875,13 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-27" - x="213.38774" + x="471.38776" y="296.80188" style="font-size:16px;line-height:1.25;font-family:sans-serif">generate_etag</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="213.05571" + x="471.05573" y="462.5274" id="text5371-2-74" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -640,9 +889,362 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-18" - x="213.05571" + x="471.05573" y="462.5274" style="font-size:16px;line-height:1.25;font-family:sans-serif">expires</tspan></text> + <g + transform="translate(528.88515,270.08038)" + id="g5650-93-2" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-3-7" + d="m 
-57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-4-0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <rect + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-1-8-93" + width="210.17955" + height="35.209244" + x="366.013" + y="605.5506" + rx="15" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="470.63077" + y="627.31525" + id="text5371-2-74-6" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-18-0" + x="470.63077" + y="627.31525" + style="font-size:16px;line-height:1.25;font-family:sans-serif">has range?</tspan></text> + <g + transform="translate(528.88515,187.69716)" + id="g5650-93-2-6" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-3-7-2" + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-4-0-6" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <rect + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-1-8-93-1" + width="210.17955" + height="35.209244" + x="366.013" + y="523.02966" + rx="15" + 
inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="470.63077" + y="544.79431" + id="text5371-2-74-6-8" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-18-0-7" + x="470.63077" + y="544.79431" + style="font-size:16px;line-height:1.25;font-family:sans-serif">resource provides ranges?</tspan></text> + <g + transform="translate(528.88515,353.49259)" + id="g5650-93-2-6-9" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-3-7-2-2" + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-4-0-6-0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <rect + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-1-8-93-1-2" + width="210.17955" + height="35.209244" + x="366.013" + y="687.83093" + rx="15" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="470.63077" + y="709.63556" + id="text5371-2-74-6-8-3" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-18-0-7-7" + x="470.63077" + y="709.63556" + style="font-size:16px;line-height:1.25;font-family:sans-serif">has if-range?</tspan></text> + <g + transform="translate(528.88515,435.58038)" + id="g5650-93-2-6-9-5" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-3-7-2-2-9" + d="m -57.78256,351.41962 v 52.3259" + 
style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-4-0-6-0-2" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <rect + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-1-8-93-1-2-2" + width="210.17955" + height="35.209244" + x="366.013" + y="770.83313" + rx="15" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="470.63077" + y="792.59778" + id="text5371-2-74-6-8-3-8" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-18-0-7-7-9" + x="470.63077" + y="792.59778" + style="font-size:16px;line-height:1.25;font-family:sans-serif">requested range provided?</tspan></text> + <g + transform="translate(528.88515,518.28038)" + id="g5650-93-2-6-9-5-7" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-3-7-2-2-9-3" + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-4-0-6-0-2-6" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <rect + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-1-8-93-1-2-2-1" + width="210.17955" + height="35.209244" + 
x="366.013" + y="853.35419" + rx="15" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="470.83078" + y="875.15881" + id="text5371-2-74-6-8-3-8-2" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-18-0-7-7-9-9" + x="470.83078" + y="875.15881" + style="font-size:16px;line-height:1.25;font-family:sans-serif">range_satisfiable</tspan></text> + <g + transform="rotate(-34.012747,1234.1863,-299.61425)" + id="g5650-93-2-6-9-5-7-3" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-3-7-2-2-9-3-1" + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-4-0-6-0-2-6-9" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <g + transform="rotate(33.477859,-837.91737,1442.2703)" + id="g5650-93-2-6-9-5-7-3-8" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"> + <path + inkscape:connector-curvature="0" + id="path5570-3-7-2-2-9-3-1-43" + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> + <path + transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" + inkscape:transform-center-y="2.1823437" + d="m -222.73865,430.10821 -12.85982,-22.27386 25.71964,0 z" + inkscape:randomized="0" + inkscape:rounded="0" + inkscape:flatsided="true" + sodipodi:arg2="2.6179939" + sodipodi:arg1="1.5707963" + sodipodi:r2="7.4246211" + sodipodi:r1="14.849242" + sodipodi:cy="415.25897" + sodipodi:cx="-222.73865" + sodipodi:sides="3" + id="path5576-4-0-6-0-2-6-9-1" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + sodipodi:type="star" /> + </g> + <rect + 
style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-1-8-93-1-2-2-1-4" + width="210.17955" + height="35.209244" + x="366.013" + y="935.875" + rx="15" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="470.94278" + y="957.45563" + id="text5371-2-74-6-8-3-8-2-7" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-18-0-7-7-9-9-8" + x="470.94278" + y="957.45563" + style="font-size:16px;line-height:1.25;font-family:sans-serif">automatic range?</tspan></text> <rect style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" id="rect5273-1-2" @@ -654,7 +1256,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" - x="251.83722" + x="509.83722" y="175.92931" id="text5371-2-391" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -662,22 +1264,120 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-63" - x="251.83722" + x="509.83722" y="175.92931" style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" - x="161.54012" - y="670.38055" + x="336.74399" + y="1000.7801" + id="text5371-2-391-4" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-63-9" + x="336.74399" + y="1000.7801" + style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" + x="484.61441" + y="669.48737" + id="text5371-2-391-4-6" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-63-9-8" + x="484.61441" + y="669.48737" + style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> + <text + xml:space="preserve" + 
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" + x="484.61441" + y="587.08673" + id="text5371-2-391-4-9" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-63-9-2" + x="484.61441" + y="587.08673" + style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" + x="397.49249" + y="751.30035" + id="text5371-2-391-4-66" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-63-9-4" + x="397.49249" + y="751.30035" + style="font-size:16px;line-height:1.25;font-family:sans-serif">false, or</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" + x="484.61441" + y="834.89026" + id="text5371-2-391-4-95" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-63-9-0" + x="484.61441" + y="834.89026" + style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" + x="484.61441" + y="917.41119" + id="text5371-2-391-4-4" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-63-9-87" + x="484.61441" + y="917.41119" + style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" + x="570.74402" + y="1000.7801" + id="text5371-2-391-2" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-63-0" + x="570.74402" + y="1000.7801" + style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" + x="130.73801" + y="752.90198" id="text5371-2-8" 
inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-3" - x="161.54012" - y="670.38055" + x="130.73801" + y="752.90198" style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <g transform="matrix(0,-1,1,0,-513.31414,353.05561)" @@ -708,7 +1408,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" - x="447.26678" + x="705.26678" y="176.39024" id="text5371-4-6" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -716,17 +1416,111 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-0" - x="447.26678" + x="705.26678" y="176.39024" style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" + x="334.72018" + y="614.93298" + id="text5371-4-6-1" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-9-0-5" + x="334.72018" + y="614.93298" + style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="312.42276" + y="575.93713" + id="text5371-4-6-1-0" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-9-0-5-8" + x="312.42276" + y="575.93713" + style="font-size:16px;line-height:1.25;font-family:sans-serif;fill:#6d8e41;fill-opacity:1">false</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" + x="323.40799" + y="900.71991" + id="text5371-4-6-1-3" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-9-0-5-6" + x="323.40799" + y="900.71991" + style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" + x="318.40799" + y="744.23999" + id="text5371-4-6-1-9" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-9-0-5-7" + x="318.40799" + 
y="744.23999" + style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" + x="372.94284" + y="665.52533" + id="text5371-4-6-1-7" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + x="372.94284" + y="665.52533" + style="font-size:16px;line-height:1.25;font-family:sans-serif" + id="tspan10">no match</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:end;letter-spacing:0px;word-spacing:0px;text-anchor:end;fill:#9b3b1c;fill-opacity:1;stroke:none" + x="218.43921" + y="1004.6766" + id="text5371-4-6-1-7-3" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + x="218.43921" + y="1004.6766" + style="font-size:16px;line-height:1.25;font-family:sans-serif;text-align:end;text-anchor:end" + id="tspan10-9">error producing</tspan><tspan + sodipodi:role="line" + x="218.43921" + y="1024.6765" + style="font-size:16px;line-height:1.25;font-family:sans-serif;text-align:end;text-anchor:end" + id="tspan11">automatic</tspan><tspan + sodipodi:role="line" + x="218.43921" + y="1044.6765" + style="font-size:16px;line-height:1.25;font-family:sans-serif;text-align:end;text-anchor:end" + id="tspan12">ranged response</tspan></text> <g - transform="translate(563.72619,-141.76777)" + transform="translate(821.72619,-141.76777)" id="g5650-2-2"> <path inkscape:connector-curvature="0" id="path5570-2-1" - d="m -57.78256,343.20394 0,61.59661" - style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,343.20394 v 61.59661" + style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -742,7 +1536,7 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-12-5" - style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <rect @@ -805,21 +1599,21 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" - x="46.409981" - y="711.18011" + x="23.607994" + y="793.70099" id="text5371-4-3-9" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" 
inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-62-9" - x="46.409981" - y="711.18011" + x="23.607994" + y="793.70099" style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" - x="545.12921" + x="803.12921" y="257.43518" id="text5371-4-4-9" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -827,11 +1621,11 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-2-6" - x="545.12921" + x="803.12921" y="257.43518" style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> <g - transform="matrix(0.70710678,0.70710678,-0.70710678,0.70710678,547.47379,-62.310424)" + transform="rotate(45,477.95223,941.13721)" id="g5650-9-9" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -839,8 +1633,8 @@ <path inkscape:connector-curvature="0" id="path5570-7-6" - d="m -57.78256,351.41962 0,52.3259" - style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -856,7 +1650,7 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-09-5" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <use @@ -864,7 +1658,7 @@ y="0" xlink:href="#rect5273-22" id="use5355-0" - transform="translate(384.82851,-20.897068)" + transform="translate(642.82851,-20.897068)" width="744.09448" height="1052.3622" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -873,24 +1667,24 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" - x="213.38774" + x="471.38776" y="216.5154" id="text5371-4-0-8" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" - x="213.38774" + x="471.38776" y="216.5154" id="tspan17171-6" style="font-size:16px;line-height:1.25;font-family:sans-serif">cond</tspan></text> <rect - style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.73499846;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + 
style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-7-3-1-1" width="218.52127" height="34.993004" - x="103.84195" - y="770.70062" + x="77.078499" + y="853.2215" rx="12.372616" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -898,24 +1692,24 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" - x="212.88774" - y="792.61121" + x="186.33914" + y="874.87799" id="text5371-43-5" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-3-62" - x="212.88774" - y="792.61121" + x="186.33914" + y="874.87799" style="font-size:16px;line-height:1.25;font-family:sans-serif">300 multiple choices</tspan></text> <rect - style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.73499846;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-7-3-1-8" width="218.52127" height="34.993004" - x="103.84195" - y="688.17969" + x="77.078499" + y="770.7005" rx="12.372616" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -923,29 +1717,137 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" - x="212.35258" - y="711.50043" + x="185.95512" + y="793.91699" id="text5371-43-3" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-3-1" - x="212.35258" - y="711.50043" + x="185.95512" + y="793.91699" style="font-size:16px;line-height:1.25;font-family:sans-serif">200 OK</tspan></text> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" + x="360.77811" + y="1077.916" + id="text5371-2-8-2" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-3-0" + x="360.77811" + y="1077.916" + style="font-size:16px;line-height:1.25;font-family:sans-serif">automatic</tspan><tspan + sodipodi:role="line" + x="360.77811" + y="1097.916" + style="font-size:16px;line-height:1.25;font-family:sans-serif" + id="tspan4">ranged response</tspan></text> + <text + xml:space="preserve" + 
style="font-style:normal;font-weight:normal;font-size:16px;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" + x="484.05441" + y="751.09668" + id="text5371-2-8-2-4" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + x="484.05441" + y="751.09668" + style="font-size:16px;line-height:1.25;font-family:sans-serif" + id="tspan6">strong etag match</tspan></text> + <rect + style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-7-3-1-8-6" + width="218.52127" + height="34.993004" + x="233.9435" + y="1114.3829" + rx="12.372616" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" + x="342.99612" + y="1137.7036" + id="text5371-43-3-1" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-3-1-5" + x="342.99612" + y="1137.7036" + style="font-size:16px;line-height:1.25;font-family:sans-serif">206 partial content</tspan></text> + <rect + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-1-4-4-6" + width="210.17955" + height="35.209244" + x="493.94901" + y="1018.396" + rx="15" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" + x="598.33478" + y="1040.1606" + id="text5371-2-3-0-5-5" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-6-7-3-0-6" + x="598.33478" + y="1040.1606" + style="font-size:16px;line-height:1.25;font-family:sans-serif">RangeCallback</tspan></text> + <rect + style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-7-3-1-8-6-4" + width="218.52127" + height="34.993004" + x="489.77835" + y="1114.1394" + rx="12.372616" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + 
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" + x="598.83099" + y="1137.4601" + id="text5371-43-3-1-7" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + id="tspan5373-3-1-5-4" + x="598.83099" + y="1137.4601" + style="font-size:16px;line-height:1.25;font-family:sans-serif">206 partial content</tspan></text> <g - transform="matrix(0,-1,-1,0,1028.2004,317.70407)" + transform="matrix(0,-1,-1,0,1286.2004,317.70407)" id="g5650-2-0-4-3-8" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"> <path - style="fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" + style="fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" d="m 146.3711,152.21525 c 0,0 -65.760927,60.81119 -8.48528,132.93608" id="path20172-9-8-9" inkscape:connector-curvature="0" - transform="matrix(0,1,-1,0,248.85545,276.69312)" /> + transform="rotate(90,-13.918835,262.77429)" /> <path transform="matrix(0.38224114,0.44664484,-0.44664484,0.38224114,233.48523,355.54168)" inkscape:transform-center-y="-1.0388082" @@ -961,16 +1863,16 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-12-1-7-58-6" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" inkscape:transform-center-x="4.5299474" /> </g> <rect - style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1-28-4" width="210.17955" height="35.209244" - x="400.85385" + x="658.85388" y="192.94594" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -979,7 +1881,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="505.77957" + x="763.77954" y="216.51541" id="text5371-2-32-6" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -987,13 +1889,13 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-27-8" - x="505.77957" + x="763.77954" y="216.51541" style="font-size:16px;line-height:1.25;font-family:sans-serif">has if-match?</tspan></text> <text xml:space="preserve" 
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" - x="651.74426" + x="875.74426" y="299.27689" id="text5371-2-391-5" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1001,11 +1903,11 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-63-7" - x="651.74426" + x="875.74426" y="299.27689" style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <g - transform="translate(563.72619,22.800669)" + transform="translate(821.72619,22.800669)" id="g5650-0-6" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" @@ -1013,8 +1915,8 @@ <path inkscape:connector-curvature="0" id="path5570-5-7" - d="m -57.78256,351.41962 0,52.3259" - style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,351.41962 v 52.3259" + style="opacity:0.8;fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -1030,21 +1932,21 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-1-3" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <g - transform="matrix(0,-1,-1,0,1027.6701,482.30508)" + transform="matrix(0,-1,-1,0,1285.6701,482.30508)" id="g5650-2-0-4-3" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"> <path - style="fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" + style="fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" d="m 146.3711,152.21525 c 0,0 -65.760927,60.81119 -8.48528,132.93608" id="path20172-9-8" inkscape:connector-curvature="0" - transform="matrix(0,1,-1,0,248.85545,276.69312)" /> + transform="rotate(90,-13.918835,262.77429)" /> <path transform="matrix(0.38224114,0.44664484,-0.44664484,0.38224114,233.48523,355.54168)" inkscape:transform-center-y="-1.0388082" @@ -1060,16 +1962,16 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-12-1-7-58" - style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" inkscape:transform-center-x="4.5299474" /> </g> <rect - 
style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1-28-7" width="210.17955" height="35.209244" - x="400.85385" + x="658.85388" y="357.98779" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1078,7 +1980,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="505.94363" + x="763.9436" y="379.78381" id="text5371-2-32-8" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1086,15 +1988,15 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-27-1" - x="505.94363" + x="763.9436" y="379.78381" style="font-size:16px;line-height:1.25;font-family:sans-serif">previously_existed</tspan></text> <rect - style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.73499846;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-7-3-1-5" width="218.52127" height="34.993004" - x="396.68301" + x="654.68298" y="440.37622" rx="12.372616" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1103,7 +2005,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" - x="506.2796" + x="764.2796" y="463.83755" id="text5371-43-2" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1111,13 +2013,13 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-3-9" - x="506.2796" + x="764.2796" y="463.83755" style="font-size:16px;line-height:1.25;font-family:sans-serif">404 not found</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" - x="528.75421" + x="786.75421" y="422.69736" id="text5371-2-8-9" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1125,17 +2027,17 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-3-4" - x="528.75421" + x="786.75421" y="422.69736" style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <g - transform="translate(563.72619,187.85116)" + transform="translate(821.72619,187.85116)" id="g5650-2-2-17"> <path inkscape:connector-curvature="0" id="path5570-2-1-0" - d="m -57.78256,343.20394 0,61.59661" - 
style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,343.20394 v 61.59661" + style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -1151,21 +2053,21 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-12-5-45" - style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <g - transform="matrix(0,-1,-1,0,1027.4933,646.81763)" + transform="matrix(0,-1,-1,0,1285.4933,646.81763)" id="g5650-2-0-4-3-8-0-3" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"> <path - style="fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" + style="fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" d="m 146.3711,152.21525 c 0,0 -65.760927,60.81119 -8.48528,132.93608" id="path20172-9-8-9-31-8" inkscape:connector-curvature="0" - transform="matrix(0,1,-1,0,248.85545,276.69312)" /> + transform="rotate(90,-13.918835,262.77429)" /> <path transform="matrix(0.38224114,0.44664484,-0.44664484,0.38224114,233.48523,355.54168)" inkscape:transform-center-y="-1.0388082" @@ -1181,16 +2083,16 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-12-1-7-58-6-3-6" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" inkscape:transform-center-x="4.5299474" /> </g> <rect - style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1-8-9" width="210.17955" height="35.209244" - x="400.85385" + x="658.85388" y="523.02966" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1199,7 +2101,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="505.45535" + x="763.45532" y="544.82568" id="text5371-2-74-0" 
inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1207,7 +2109,7 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-18-2" - x="505.45535" + x="763.45532" y="544.82568" style="font-size:16px;line-height:1.25;font-family:sans-serif">moved_permanently</tspan></text> <g @@ -1242,11 +2144,11 @@ inkscape:transform-center-x="4.5299474" /> </g> <rect - style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.73499846;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-7-3-1" width="218.52127" height="34.993004" - x="396.68301" + x="654.68298" y="275.81555" rx="12.372616" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1255,7 +2157,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" - x="506.2796" + x="764.2796" y="297.7261" id="text5371-43" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1263,13 +2165,13 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-3" - x="506.2796" + x="764.2796" y="297.7261" style="font-size:16px;line-height:1.25;font-family:sans-serif">412 precondition failed</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" - x="668.11926" + x="892.11926" y="463.37662" id="text5371-4-3-9-8" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1277,13 +2179,13 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-62-9-9" - x="668.11926" + x="892.11926" y="463.37662" style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" - x="549.12921" + x="807.12921" y="587.59863" id="text5371-4-4-9-3" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1291,13 +2193,13 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-2-6-2" - x="549.12921" + x="807.12921" y="587.59863" style="font-size:16px;line-height:1.25;font-family:sans-serif">true*</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" - x="651.74426" + x="875.74426" y="628.87946" id="text5371-2-391-5-5-1" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1305,15 +2207,15 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-63-7-9-9" - x="651.74426" + x="875.74426" y="628.87946" 
style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <rect - style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.73499846;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-7-3-1-5-9" width="218.52127" height="34.993004" - x="396.68301" + x="654.68298" y="605.41809" rx="12.372616" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1322,7 +2224,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" - x="505.57257" + x="763.57257" y="627.32867" id="text5371-43-2-9" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1330,17 +2232,17 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-3-9-7" - x="505.57257" + x="763.57257" y="627.32867" style="font-size:16px;line-height:1.25;font-family:sans-serif">301 moved permanently</tspan></text> <g - transform="translate(563.72619,353.12604)" + transform="translate(821.72619,353.12604)" id="g5650-2-2-65"> <path inkscape:connector-curvature="0" id="path5570-2-1-5" - d="m -57.78256,343.20394 0,61.59661" - style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" /> + d="m -57.78256,343.20394 v 61.59661" + style="opacity:0.8;fill:none;stroke:#9b3b1c;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /> <path transform="matrix(0.58787746,0,0,0.58787746,73.160466,163.35774)" inkscape:transform-center-y="2.1823437" @@ -1356,21 +2258,21 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-12-5-5" - style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" /> </g> <g - transform="matrix(0,-1,-1,0,1032.2004,811.50699)" + transform="matrix(0,-1,-1,0,1290.2004,811.50699)" id="g5650-2-0-4-3-8-0-3-7" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" inkscape:export-xdpi="89.926643" inkscape:export-ydpi="89.926643"> <path - style="fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" + style="fill:none;stroke:#6d8e41;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" d="m 146.3711,152.21525 c 0,0 -65.760927,60.81119 -8.48528,132.93608" id="path20172-9-8-9-31-8-1" inkscape:connector-curvature="0" - transform="matrix(0,1,-1,0,248.85545,276.69312)" /> + transform="rotate(90,-13.918835,262.77429)" /> <path 
transform="matrix(0.38224114,0.44664484,-0.44664484,0.38224114,233.48523,355.54168)" inkscape:transform-center-y="-1.0388082" @@ -1386,16 +2288,16 @@ sodipodi:cx="-222.73865" sodipodi:sides="3" id="path5576-12-1-7-58-6-3-6-3" - style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="opacity:0.8;fill:#6d8e41;fill-opacity:1;fill-rule:nonzero;stroke:#6d8e41;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" sodipodi:type="star" inkscape:transform-center-x="4.5299474" /> </g> <rect - style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#d1f2a5;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-1-8-9-1" width="210.17955" height="35.209244" - x="400.85385" + x="658.85388" y="687.83093" rx="15" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1404,7 +2306,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" - x="505.45535" + x="763.45532" y="709.62695" id="text5371-2-74-0-7" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1412,13 +2314,13 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-18-2-8" - x="505.45535" + x="763.45532" y="709.62695" style="font-size:16px;line-height:1.25;font-family:sans-serif">moved_temporarily</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" - x="549.12921" + x="807.12921" y="752.64056" id="text5371-4-4-9-3-2" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1426,13 +2328,13 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-9-2-6-2-6" - x="549.12921" + x="807.12921" y="752.64056" style="font-size:16px;line-height:1.25;font-family:sans-serif">true*</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" - x="651.74426" + x="875.74426" y="794.16199" id="text5371-2-391-5-5-1-0" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1440,15 +2342,15 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-6-63-7-9-9-2" - x="651.74426" + x="875.74426" y="794.16199" style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <rect - style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.73499846;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + 
style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-7-3-1-5-9-2" width="218.52127" height="34.993004" - x="396.68301" + x="654.68298" y="770.70062" rx="12.372616" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1457,7 +2359,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" - x="505.57257" + x="763.57257" y="792.61121" id="text5371-43-2-9-7" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1465,15 +2367,15 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-3-9-7-8" - x="505.57257" + x="763.57257" y="792.61121" style="font-size:16px;line-height:1.25;font-family:sans-serif">307 moved temporarily</tspan></text> <rect - style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.73499846;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" + style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-7-3-1-5-9-2-4" width="218.52127" height="34.993004" - x="396.68301" + x="654.68298" y="853.22168" rx="12.372616" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1482,7 +2384,7 @@ <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" - x="505.97882" + x="763.97882" y="874.99164" id="text5371-43-2-9-7-3" inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" @@ -1490,7 +2392,7 @@ inkscape:export-ydpi="89.926643"><tspan sodipodi:role="line" id="tspan5373-3-9-7-8-7" - x="505.97882" + x="763.97882" y="874.99164" style="font-size:16px;line-height:1.25;font-family:sans-serif">410 gone</tspan></text> <g @@ -1519,5 +2421,30 @@ style="opacity:0.8;fill:#9b3b1c;fill-opacity:1;fill-rule:nonzero;stroke:#9b3b1c;stroke-width:0;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" sodipodi:type="star" /> </g> + <rect + style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:2.735;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="rect5273-7-3-1-8-6-4-8" + width="218.52127" + height="34.993004" + x="77.078499" + y="935.98315" + rx="12.372616" + inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643" /> + <text + xml:space="preserve" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" + x="186.57913" + y="959.51965" + id="text5371-43-3-1-7-6" + 
inkscape:export-filename="/home/essen/ninenines/cowboy/guide/rest_options.png" + inkscape:export-xdpi="89.926643" + inkscape:export-ydpi="89.926643"><tspan + sodipodi:role="line" + x="186.57913" + y="959.51965" + style="font-size:16px;line-height:1.25;font-family:sans-serif" + id="tspan8">416 not satisfiable</tspan></text> </g> </svg> diff --git a/doc/src/guide/rest_handlers.asciidoc b/doc/src/guide/rest_handlers.asciidoc index baf8e6a..19a9859 100644 --- a/doc/src/guide/rest_handlers.asciidoc +++ b/doc/src/guide/rest_handlers.asciidoc @@ -84,6 +84,8 @@ if it is undefined, moving directly to the next step. Similarly, | multiple_choices | `false` | options | `ok` | previously_existed | `false` +| ranges_provided | skip +| range_satisfiable | `true` | rate_limited | `false` | resource_exists | `true` | service_available | `true` @@ -97,8 +99,9 @@ As you can see, Cowboy tries to move on with the request whenever possible by using well thought out default values. In addition to these, there can be any number of user-defined -callbacks that are specified through `content_types_accepted/2` -and `content_types_provided/2`. They can take any name, however +callbacks that are specified through `content_types_accepted/2`, +`content_types_provided/2` or `ranges_provided/2`. They can take +any name (except `auto` for range callbacks), however it is recommended to use a separate prefix for the callbacks of each function. For example, `from_html` and `to_html` indicate in the first case that we're accepting a resource given as HTML, @@ -113,9 +116,10 @@ Req object directly. The values are defined in the following table: [cols="<,<",options="header"] |=== | Key | Details -| media_type | The content-type negotiated for the response entity. -| language | The language negotiated for the response entity. -| charset | The charset negotiated for the response entity. +| media_type | The content-type negotiated for the response entity +| language | The language negotiated for the response entity +| charset | The charset negotiated for the response entity +| range | The range selected for the ranged response |=== They can be used to send a proper body with the response to a @@ -129,11 +133,16 @@ of the REST code. They are listed in the following table. 
[cols="<,<",options="header"] |=== | Header name | Details +| accept-ranges | Range units accepted by the resource +| allow | HTTP methods allowed by the resource | content-language | Language used in the response body +| content-range | Range of the content found in the response | content-type | Media type and charset of the response body | etag | Etag of the resource | expires | Expiration date of the resource | last-modified | Last modification date for the resource | location | Relative or absolute URI to the requested resource +| retry-after | Delay or time the client should wait before accessing the resource | vary | List of headers that may change the representation of the resource +| www-authenticate | Authentication information to access the resource |=== diff --git a/doc/src/guide/rest_start.png b/doc/src/guide/rest_start.png Binary files differindex 4c230a0..bbaa655 100644 --- a/doc/src/guide/rest_start.png +++ b/doc/src/guide/rest_start.png diff --git a/doc/src/guide/rest_start.svg b/doc/src/guide/rest_start.svg index 6f1dd87..9a2646f 100644 --- a/doc/src/guide/rest_start.svg +++ b/doc/src/guide/rest_start.svg @@ -2,24 +2,23 @@ <!-- Created with Inkscape (http://www.inkscape.org/) --> <svg - xmlns:osb="http://www.openswatchbook.org/uri/2009/osb" - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - xmlns:xlink="http://www.w3.org/1999/xlink" - xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" - xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="744.09448819" height="1052.3622047" id="svg2" version="1.1" - inkscape:version="0.92.2 2405546, 2018-03-11" + inkscape:version="1.3.2 (091e20ef0f, 2023-11-25, custom)" sodipodi:docname="rest_start.svg" inkscape:export-filename="/home/essen/Dropbox/Public/drawing.png" inkscape:export-xdpi="90" - inkscape:export-ydpi="90"> + inkscape:export-ydpi="90" + xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" + xmlns:xlink="http://www.w3.org/1999/xlink" + xmlns="http://www.w3.org/2000/svg" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:cc="http://creativecommons.org/ns#" + xmlns:dc="http://purl.org/dc/elements/1.1/"> <defs id="defs4"> <linearGradient @@ -50,7 +49,7 @@ </linearGradient> <linearGradient id="linearGradient5233" - osb:paint="solid"> + inkscape:swatch="solid"> <stop style="stop-color:#69d2e7;stop-opacity:1;" offset="0" @@ -65,25 +64,34 @@ inkscape:pageopacity="1" inkscape:pageshadow="2" inkscape:zoom="1.0000001" - inkscape:cx="213.11305" - inkscape:cy="726.77495" + inkscape:cx="293.49997" + inkscape:cy="315.49997" inkscape:document-units="px" inkscape:current-layer="layer1" showgrid="false" - inkscape:window-width="1920" - inkscape:window-height="1043" + inkscape:window-width="1440" + inkscape:window-height="900" inkscape:window-x="0" inkscape:window-y="0" inkscape:window-maximized="1" inkscape:snap-global="true" - showguides="true"> + showguides="true" + inkscape:showpageshadow="2" + inkscape:pagecheckerboard="0" + inkscape:deskcolor="#d1d1d1" + inkscape:export-bgcolor="#ffffffff"> <inkscape:grid type="xygrid" id="grid5357" empspacing="5" - visible="true" + visible="false" enabled="true" - snapvisiblegridlinesonly="true" /> + snapvisiblegridlinesonly="true" + originx="0" + originy="0" + 
spacingy="1" + spacingx="1" + units="px" /> </sodipodi:namedview> <metadata id="metadata7"> @@ -113,7 +121,8 @@ transform="translate(205.92143,-296.03137)" id="g5650-7" inkscape:export-xdpi="90" - inkscape:export-ydpi="90"> + inkscape:export-ydpi="90" + inkscape:export-filename="rest_start.png"> <path inkscape:connector-curvature="0" id="path5570-9" @@ -921,7 +930,7 @@ id="tspan5373-6-5" x="148.29512" y="728.47717" - style="font-size:16px;line-height:1.25;font-family:sans-serif">valid_content_headers</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">rate_limited</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" @@ -934,7 +943,7 @@ id="tspan5373-6-1" x="147.83809" y="812.14441" - style="font-size:16px;line-height:1.25;font-family:sans-serif">valid_entity_length</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">valid_content_headers</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" @@ -1098,7 +1107,7 @@ id="tspan5373-6-33" x="164.26562" y="771.62329" - style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:start;letter-spacing:0px;word-spacing:0px;text-anchor:start;fill:#6d8e41;fill-opacity:1;stroke:none" @@ -1241,7 +1250,7 @@ id="tspan5373-9-5" x="315.26172" y="707.97595" - style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">true*</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" @@ -1448,7 +1457,7 @@ id="tspan5373-58" x="524.26172" y="729.9563" - style="font-size:16px;line-height:1.25;font-family:sans-serif">501 not implemented</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">429 too many requests</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#77823c;fill-opacity:1;stroke:none" @@ -1461,7 +1470,7 @@ id="tspan5373-60" x="524.26172" y="813.64819" - style="font-size:16px;line-height:1.25;font-family:sans-serif">413 request entity too large</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">501 not implemented</tspan></text> <rect style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:#a9ca7d;stroke-width:2.44279909;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" id="rect5367" @@ -1576,7 +1585,7 @@ id="tspan5373-6-1-1" x="147.8381" y="895.81165" - 
style="font-size:16px;line-height:1.25;font-family:sans-serif">rate_limited</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">valid_entity_length</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#6d8e41;fill-opacity:1;stroke:none" @@ -1615,7 +1624,7 @@ id="tspan5373-6-9-6" x="164.26562" y="938.95776" - style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">true</tspan></text> <text xml:space="preserve" style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:Sans;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#9b3b1c;fill-opacity:1;stroke:none" @@ -1628,7 +1637,7 @@ id="tspan5373-9-012-6" x="315.26172" y="875.08826" - style="font-size:16px;line-height:1.25;font-family:sans-serif">true*</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">false</tspan></text> <rect style="fill:#effab4;fill-opacity:1;fill-rule:nonzero;stroke:#c7d28c;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" id="rect5273-7-3-5-2" @@ -1651,6 +1660,6 @@ id="tspan5373-60-1" x="524.26172" y="897.31543" - style="font-size:16px;line-height:1.25;font-family:sans-serif">429 too many requests</tspan></text> + style="font-size:16px;line-height:1.25;font-family:sans-serif">413 request entity too large</tspan></text> </g> </svg> diff --git a/doc/src/guide/specs.asciidoc b/doc/src/guide/specs.asciidoc index 32cd206..14736b3 100644 --- a/doc/src/guide/specs.asciidoc +++ b/doc/src/guide/specs.asciidoc @@ -96,6 +96,8 @@ for or related to HTTP. * https://tools.ietf.org/html/rfc8673[RFC 8673]: HTTP Random Access and Live Content * https://tools.ietf.org/html/rfc8674[RFC 8674]: The "safe" HTTP Preference * https://tools.ietf.org/html/rfc8740[RFC 8740]: Using TLS 1.3 with HTTP/2 +* https://tools.ietf.org/html/rfc8941[RFC 8941]: Structured Field Values for HTTP +* https://tools.ietf.org/html/rfc8942[RFC 8942]: HTTP Client Hints * https://www.w3.org/TR/trace-context/[Trace Context]: Trace Context * https://www.w3.org/TR/webmention/[Webmention]: Webmention diff --git a/doc/src/guide/streams.asciidoc b/doc/src/guide/streams.asciidoc index 0ac84ce..e8ddae0 100644 --- a/doc/src/guide/streams.asciidoc +++ b/doc/src/guide/streams.asciidoc @@ -65,6 +65,11 @@ automatically compress responses when possible. It is not enabled by default. It is a good example for writing your own handlers that will modify responses. +link:man:cowboy_decompress_h(3)[cowboy_decompress_h] will +automatically decompress request bodies when possible. +It is not enabled by default. It is a good example for +writing your own handlers that will modify requests. + link:man:cowboy_metrics_h(3)[cowboy_metrics_h] gathers metrics about a stream then passes them to a configurable function. It is not enabled by default. 
diff --git a/doc/src/guide/ws_handlers.asciidoc b/doc/src/guide/ws_handlers.asciidoc index 5cfdcb1..7005665 100644 --- a/doc/src/guide/ws_handlers.asciidoc +++ b/doc/src/guide/ws_handlers.asciidoc @@ -62,7 +62,7 @@ init(Req0, State) -> undefined -> {cowboy_websocket, Req0, State}; Subprotocols -> - case lists:keymember(<<"mqtt">>, 1, Subprotocols) of + case lists:member(<<"mqtt">>, Subprotocols) of true -> Req = cowboy_req:set_resp_header(<<"sec-websocket-protocol">>, <<"mqtt">>, Req0), diff --git a/doc/src/manual/cowboy.asciidoc b/doc/src/manual/cowboy.asciidoc index c213df0..9298b6d 100644 --- a/doc/src/manual/cowboy.asciidoc +++ b/doc/src/manual/cowboy.asciidoc @@ -14,6 +14,7 @@ manipulating Ranch listeners. * link:man:cowboy:start_clear(3)[cowboy:start_clear(3)] - Listen for connections using plain TCP * link:man:cowboy:start_tls(3)[cowboy:start_tls(3)] - Listen for connections using TLS * link:man:cowboy:stop_listener(3)[cowboy:stop_listener(3)] - Stop the given listener +* link:man:cowboy:get_env(3)[cowboy:get_env(3)] - Retrieve a listener's environment value * link:man:cowboy:set_env(3)[cowboy:set_env(3)] - Update a listener's environment value == Types diff --git a/doc/src/manual/cowboy.get_env.asciidoc b/doc/src/manual/cowboy.get_env.asciidoc new file mode 100644 index 0000000..c2895b0 --- /dev/null +++ b/doc/src/manual/cowboy.get_env.asciidoc @@ -0,0 +1,78 @@ += cowboy:get_env(3) + +== Name + +cowboy:get_env - Retrieve a listener's environment value + +== Description + +[source,erlang] +---- +get_env(Name :: ranch:ref(), + Key :: atom()) + -> any() + +get_env(Name :: ranch:ref(), + Key :: atom(), + Default :: any()) + -> any() +---- + +Retrieve an environment value for a previously started +listener. + +This function may crash when the key is missing from the +environment and a default value is not provided. + +== Arguments + +Name:: + +The name of the listener to access. ++ +The name of the listener is the first argument given to the +link:man:cowboy:start_clear(3)[cowboy:start_clear(3)], +link:man:cowboy:start_tls(3)[cowboy:start_tls(3)] or +link:man:ranch:start_listener(3)[ranch:start_listener(3)] function. + +Key:: + +The key in the environment map. Common keys include `dispatch` +and `middlewares`. + +Default:: + +The default value if the key is missing. + +== Return value + +The environment value is returned on success. + +If a default was provided and the key is missing, then the +default value is returned. + +An `exit:badarg` exception is thrown when the listener does +not exist. + +An `exit:{badkey, Key}` exception is thrown when the key +requested is missing and no default was provided. + +== Changelog + +* *2.11*: Function introduced. + +== Examples + +.Retrieve a listener's routes +[source,erlang] +---- +Dispatch = cowboy:get_env(example, dispatch). +---- + +== See also + +link:man:cowboy(3)[cowboy(3)], +link:man:cowboy:start_clear(3)[cowboy:start_clear(3)], +link:man:cowboy:start_tls(3)[cowboy:start_tls(3)], +link:man:cowboy:set_env(3)[cowboy:set_env(3)], +link:man:ranch:get_protocol_options(3)[ranch:get_protocol_options(3)] diff --git a/doc/src/manual/cowboy.set_env.asciidoc b/doc/src/manual/cowboy.set_env.asciidoc index 30af485..fc0d1f7 100644 --- a/doc/src/manual/cowboy.set_env.asciidoc +++ b/doc/src/manual/cowboy.set_env.asciidoc @@ -76,4 +76,5 @@ cowboy:set_env(example, dispatch, Dispatch).
link:man:cowboy(3)[cowboy(3)], link:man:cowboy:start_clear(3)[cowboy:start_clear(3)], link:man:cowboy:start_tls(3)[cowboy:start_tls(3)], +link:man:cowboy:get_env(3)[cowboy:get_env(3)], link:man:ranch:set_protocol_options(3)[ranch:set_protocol_options(3)] diff --git a/doc/src/manual/cowboy_app.asciidoc b/doc/src/manual/cowboy_app.asciidoc index 8db84b1..fd833be 100644 --- a/doc/src/manual/cowboy_app.asciidoc +++ b/doc/src/manual/cowboy_app.asciidoc @@ -36,6 +36,7 @@ Stream handlers: * link:man:cowboy_stream_h(3)[cowboy_stream_h(3)] - Default stream handler * link:man:cowboy_compress_h(3)[cowboy_compress_h(3)] - Compress stream handler +* link:man:cowboy_decompress_h(3)[cowboy_decompress_h(3)] - Decompress stream handler * link:man:cowboy_metrics_h(3)[cowboy_metrics_h(3)] - Metrics stream handler * link:man:cowboy_tracer_h(3)[cowboy_tracer_h(3)] - Tracer stream handler diff --git a/doc/src/manual/cowboy_compress_h.asciidoc b/doc/src/manual/cowboy_compress_h.asciidoc index 31a9162..6551567 100644 --- a/doc/src/manual/cowboy_compress_h.asciidoc +++ b/doc/src/manual/cowboy_compress_h.asciidoc @@ -9,7 +9,7 @@ cowboy_compress_h - Compress stream handler The module `cowboy_compress_h` compresses response bodies automatically when the client supports it. It will not try to compress responses that already have a content -encoding. +encoding or that have an etag header defined. Normal responses will only be compressed when their size is lower than the configured threshold. Streamed @@ -55,6 +55,10 @@ The compress stream handler does not produce any event. == Changelog +* *2.11*: Compression is now disabled when the etag + header is in the response headers. +* *2.11*: The vary: accept-encoding header is now + always set when this handler is enabled. * *2.6*: The options `compress_buffering` and `compress_threshold` were added. * *2.0*: Module introduced. @@ -63,6 +67,7 @@ The compress stream handler does not produce any event. link:man:cowboy(7)[cowboy(7)], link:man:cowboy_stream(3)[cowboy_stream(3)], +link:man:cowboy_decompress_h(3)[cowboy_decompress_h(3)], link:man:cowboy_metrics_h(3)[cowboy_metrics_h(3)], link:man:cowboy_stream_h(3)[cowboy_stream_h(3)], link:man:cowboy_tracer_h(3)[cowboy_tracer_h(3)] diff --git a/doc/src/manual/cowboy_decompress_h.asciidoc b/doc/src/manual/cowboy_decompress_h.asciidoc new file mode 100644 index 0000000..8598ae4 --- /dev/null +++ b/doc/src/manual/cowboy_decompress_h.asciidoc @@ -0,0 +1,70 @@ += cowboy_decompress_h(3) + +== Name + +cowboy_decompress_h - Decompress stream handler + +== Description + +The module `cowboy_decompress_h` decompresses request bodies +automatically when the server supports it. + +The only compression algorithm currently supported is the +gzip algorithm. Another limitation is that decompression +is only attempted when gzip is the only content-encoding +in the request. + +This stream handler always adds a field to the Req object +with the name `content_decoded` which is treated as a +list of decoded content-encoding values. Currently this +list may only contain the `<<"gzip">>` binary if content +was decoded; or be empty otherwise. + +== Options + +[source,erlang] +---- +opts() :: #{ + decompress_enabled => boolean(), + decompress_ratio_limit => non_neg_integer() +} +---- + +Configuration for the decompress stream handler. + +The default value is given next to the option name: + +decompress_ratio_limit (20):: +The max ratio of the compressed and decompressed body +before it is rejected with a `413 Payload Too Large` +error response. 
++ +This option can be updated at any time using the +`set_options` stream handler command. + +decompress_enabled (true):: + +Whether the handler is enabled by default. ++ +This option can be updated using the `set_options` +stream handler command. This allows disabling +decompression for the current stream. Attempts +to enable or disable decompression after starting +to read the body will be ignored. + +== Events + +The decompress stream handler does not produce any event. + +== Changelog + +* *2.11*: Module introduced. + +== See also + +link:man:cowboy(7)[cowboy(7)], +link:man:cowboy_stream(3)[cowboy_stream(3)], +link:man:cowboy_compress_h(3)[cowboy_compress_h(3)], +link:man:cowboy_metrics_h(3)[cowboy_metrics_h(3)], +link:man:cowboy_stream_h(3)[cowboy_stream_h(3)], +link:man:cowboy_tracer_h(3)[cowboy_tracer_h(3)] diff --git a/doc/src/manual/cowboy_http.asciidoc b/doc/src/manual/cowboy_http.asciidoc index 33d2888..96a5585 100644 --- a/doc/src/manual/cowboy_http.asciidoc +++ b/doc/src/manual/cowboy_http.asciidoc @@ -17,27 +17,32 @@ as a Ranch protocol. [source,erlang] ---- opts() :: #{ - active_n => pos_integer(), - chunked => boolean(), - connection_type => worker | supervisor, - http10_keepalive => boolean(), - idle_timeout => timeout(), - inactivity_timeout => timeout(), - initial_stream_flow_size => non_neg_integer(), - linger_timeout => timeout(), - logger => module(), - max_empty_lines => non_neg_integer(), - max_header_name_length => non_neg_integer(), - max_header_value_length => non_neg_integer(), - max_headers => non_neg_integer(), - max_keepalive => non_neg_integer(), - max_method_length => non_neg_integer(), - max_request_line_length => non_neg_integer(), - max_skip_body_length => non_neg_integer(), - proxy_header => boolean(), - request_timeout => timeout(), - sendfile => boolean(), - stream_handlers => [module()] + active_n => pos_integer(), + alpn_default_protocol => http | http2, + chunked => boolean(), + connection_type => worker | supervisor, + dynamic_buffer => false | {pos_integer(), pos_integer()}, + hibernate => boolean(), + http10_keepalive => boolean(), + idle_timeout => timeout(), + inactivity_timeout => timeout(), + initial_stream_flow_size => non_neg_integer(), + linger_timeout => timeout(), + logger => module(), + max_empty_lines => non_neg_integer(), + max_header_name_length => non_neg_integer(), + max_header_value_length => non_neg_integer(), + max_headers => non_neg_integer(), + max_keepalive => non_neg_integer(), + max_method_length => non_neg_integer(), + max_request_line_length => non_neg_integer(), + max_skip_body_length => non_neg_integer(), + protocols => [http | http2], + proxy_header => boolean(), + request_timeout => timeout(), + reset_idle_timeout_on_send => boolean(), + sendfile => boolean(), + stream_handlers => [module()] } ---- @@ -52,7 +57,7 @@ Ranch functions `ranch:get_protocol_options/1` and The default value is given next to the option name: -active_n (100):: +active_n (1):: The number of packets Cowboy will request from the socket at once. This can be used to tweak the performance of the server. Higher @@ -60,6 +65,12 @@ values reduce the number of times Cowboy need to request more packets from the port driver at the expense of potentially higher memory being used. +alpn_default_protocol (http):: + +Default protocol to use when the client connects over TLS +without ALPN. Can be set to `http2` to disable HTTP/1.1 +entirely. + chunked (true):: Whether chunked transfer-encoding is enabled for HTTP/1.1 connections. 
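As a rough sketch of how a few of these `cowboy_http` options might be passed when starting a listener (the listener name, certificate paths and route below are placeholders, not part of this changeset):

[source,erlang]
----
%% Illustrative sketch only: protocol options are given as the third
%% argument to cowboy:start_tls/3. Paths and routes are placeholders.
Dispatch = cowboy_router:compile([{'_', [{"/", hello_h, []}]}]),
{ok, _} = cowboy:start_tls(example_tls,
    [{port, 8443},
     {certfile, "/path/to/cert.pem"},
     {keyfile, "/path/to/key.pem"}],
    #{
        env => #{dispatch => Dispatch},
        %% Clients that do not negotiate ALPN are served HTTP/2,
        %% effectively disabling HTTP/1.1 on this listener.
        alpn_default_protocol => http2,
        idle_timeout => 30000
    }).
----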
@@ -74,6 +85,21 @@ connection_type (supervisor):: Whether the connection process also acts as a supervisor. +dynamic_buffer ({1024, 131072}):: + +Cowboy will dynamically change the socket's `buffer` size +depending on the size of the data it receives from the socket. +This lets Cowboy use the optimal buffer size for the current +workload. ++ +The dynamic buffer size functionality can be disabled by +setting this option to `false`. Cowboy will also disable +it by default when the `buffer` transport option is configured. + +hibernate (false):: + +Whether the connection process will hibernate automatically. + http10_keepalive (true):: Whether keep-alive is enabled for HTTP/1.0 connections. @@ -87,7 +113,7 @@ This option can be updated at any time using the inactivity_timeout (300000):: -Time in ms with nothing received at all before Cowboy closes the connection. +**DEPRECATED** Time in ms with nothing received at all before Cowboy closes the connection. initial_stream_flow_size (65535):: @@ -138,6 +164,13 @@ max_skip_body_length (1000000):: Maximum length Cowboy is willing to skip when the user code did not read the body fully. When the remaining length is too large or unknown Cowboy will close the connection. +protocols ([http2, http]):: + +Protocols that may be used when the client connects over +cleartext TCP. The default is to allow both HTTP/1.1 and +HTTP/2. HTTP/1.1 and HTTP/2 can be disabled entirely by +omitting them from the list. + proxy_header (false):: Whether incoming connections have a PROXY protocol header. The @@ -148,6 +181,11 @@ request_timeout (5000):: Time in ms with no requests before Cowboy closes the connection. +reset_idle_timeout_on_send (false):: + +Whether the `idle_timeout` gets reset when sending data +to the socket. + sendfile (true):: Whether the sendfile syscall may be used. It can be useful to disable @@ -160,6 +198,10 @@ Ordered list of stream handlers that will handle all stream events. == Changelog +* *2.13*: The `inactivity_timeout` option was deprecated. +* *2.13*: The `active_n` default value was changed to `1`. +* *2.13*: The `dynamic_buffer` and `hibernate` options were added. +* *2.11*: The `reset_idle_timeout_on_send` option was added. * *2.8*: The `active_n` option was added. * *2.7*: The `initial_stream_flow_size` and `logger` options were added. * *2.6*: The `chunked`, `http10_keepalive`, `proxy_header` and `sendfile` options were added. diff --git a/doc/src/manual/cowboy_http2.asciidoc b/doc/src/manual/cowboy_http2.asciidoc index de632be..7b34b88 100644 --- a/doc/src/manual/cowboy_http2.asciidoc +++ b/doc/src/manual/cowboy_http2.asciidoc @@ -18,12 +18,15 @@ as a Ranch protocol. 
---- opts() :: #{ active_n => pos_integer(), + alpn_default_protocol => http | http2, connection_type => worker | supervisor, connection_window_margin_size => 0..16#7fffffff, connection_window_update_threshold => 0..16#7fffffff, + dynamic_buffer => false | {pos_integer(), pos_integer()}, enable_connect_protocol => boolean(), goaway_initial_timeout => timeout(), goaway_complete_timeout => timeout(), + hibernate => boolean(), idle_timeout => timeout(), inactivity_timeout => timeout(), initial_connection_window_size => 65535..16#7fffffff, @@ -35,14 +38,18 @@ opts() :: #{ max_connection_window_size => 0..16#7fffffff, max_decode_table_size => non_neg_integer(), max_encode_table_size => non_neg_integer(), + max_fragmented_header_block_size => 16384..16#7fffffff, max_frame_size_received => 16384..16777215, max_frame_size_sent => 16384..16777215 | infinity, max_received_frame_rate => {pos_integer(), timeout()}, max_reset_stream_rate => {pos_integer(), timeout()}, + max_cancel_stream_rate => {pos_integer(), timeout()}, max_stream_buffer_size => non_neg_integer(), max_stream_window_size => 0..16#7fffffff, preface_timeout => timeout(), + protocols => [http | http2], proxy_header => boolean(), + reset_idle_timeout_on_send => boolean(), sendfile => boolean(), settings_timeout => timeout(), stream_handlers => [module()], @@ -63,7 +70,7 @@ Ranch functions `ranch:get_protocol_options/1` and The default value is given next to the option name: -active_n (100):: +active_n (1):: The number of packets Cowboy will request from the socket at once. This can be used to tweak the performance of the server. Higher @@ -71,6 +78,12 @@ values reduce the number of times Cowboy need to request more packets from the port driver at the expense of potentially higher memory being used. +alpn_default_protocol (http):: + +Default protocol to use when the client connects over TLS +without ALPN. Can be set to `http2` to disable HTTP/1.1 +entirely. + connection_type (supervisor):: Whether the connection process also acts as a supervisor. @@ -88,11 +101,25 @@ The connection window will only get updated when its size becomes lower than this threshold, in bytes. This is to avoid sending too many `WINDOW_UPDATE` frames. +dynamic_buffer ({1024, 131072}):: + +Cowboy will dynamically change the socket's `buffer` size +depending on the size of the data it receives from the socket. +This lets Cowboy use the optimal buffer size for the current +workload. ++ +The dynamic buffer size functionality can be disabled by +setting this option to `false`. Cowboy will also disable +it by default when the `buffer` transport option is configured. + enable_connect_protocol (false):: Whether to enable the extended CONNECT method to allow protocols like Websocket to be used over an HTTP/2 stream. -This option is experimental and disabled by default. ++ +For backward compatibility reasons, this option is disabled +by default. It must be enabled to use Websocket over HTTP/2. +It will be enabled by default in a future release. goaway_initial_timeout (1000):: @@ -104,13 +131,17 @@ goaway_complete_timeout (3000):: Time in ms to wait for ongoing streams to complete before closing the connection during a graceful shutdown. +hibernate (false):: + +Whether the connection process will hibernate automatically. + idle_timeout (60000):: Time in ms with no data received before Cowboy closes the connection. inactivity_timeout (300000):: -Time in ms with nothing received at all before Cowboy closes the connection. 
+**DEPRECATED** Time in ms with nothing received at all before Cowboy closes the connection. initial_connection_window_size (65535):: @@ -167,11 +198,25 @@ Maximum header table size in bytes used by the encoder. The server will compare this value to what the client advertises and choose the smallest one as the encoder's header table size. +max_fragmented_header_block_size (32768):: + +Maximum header block size when headers are split over multiple HEADERS +and CONTINUATION frames. Clients that attempt to send header blocks +larger than this value will receive an ENHANCE_YOUR_CALM connection +error. Note that this value is not advertised and should be large +enough for legitimate requests. + max_frame_size_received (16384):: Maximum size in bytes of the frames received by the server. This value is advertised to the remote endpoint which can then decide to use any value lower or equal for its frame sizes. ++ +It is highly recommended to increase this value for performance reasons. +In a future Cowboy version the default will be increased to 1MB (1048576). +Too low values may result in very large file uploads failing because +Cowboy will detect the large number of frames as flood and close the +connection. max_frame_size_sent (infinity):: @@ -198,6 +243,14 @@ the number of streams that can be reset over a certain time period. The rate is expressed as a tuple `{NumResets, TimeMs}`. This is similar to a supervisor restart intensity/period. +max_cancel_stream_rate ({500, 10000}):: + +Maximum cancel stream rate per connection. This can be used to +protect against misbehaving or malicious peers, by limiting the +number of streams that the peer can reset over a certain time period. +The rate is expressed as a tuple `{NumCancels, TimeMs}`. This is +similar to a supervisor restart intensity/period. + max_stream_buffer_size (8000000):: Maximum stream buffer size in bytes. This is a soft limit used @@ -214,12 +267,24 @@ preface_timeout (5000):: Time in ms Cowboy is willing to wait for the connection preface. +protocols ([http2, http]):: + +Protocols that may be used when the client connects over +cleartext TCP. The default is to allow both HTTP/1.1 and +HTTP/2. HTTP/1.1 and HTTP/2 can be disabled entirely by +omitting them from the list. + proxy_header (false):: Whether incoming connections have a PROXY protocol header. The proxy information will be passed forward via the `proxy_header` key of the Req object. +reset_idle_timeout_on_send (false):: + +Whether the `idle_timeout` gets reset when sending data +to the socket. + sendfile (true):: Whether the sendfile syscall may be used. It can be useful to disable @@ -256,6 +321,15 @@ too many `WINDOW_UPDATE` frames. == Changelog +* *2.13*: The `inactivity_timeout` option was deprecated. +* *2.13*: The `active_n` default value was changed to `1`. +* *2.13*: The `dynamic_buffer` and `hibernate` options were added. +* *2.11*: Websocket over HTTP/2 is now considered stable. +* *2.11*: The `reset_idle_timeout_on_send` option was added. +* *2.11*: Add the option `max_cancel_stream_rate` to protect + against another flood scenario. +* *2.9*: The `goaway_initial_timeout` and `goaway_complete_timeout` + options were added. * *2.8*: The `active_n` option was added. * *2.8*: The `linger_timeout` option was added. * *2.8*: The `max_received_frame_rate` default value has @@ -281,7 +355,7 @@ too many `WINDOW_UPDATE` frames. `max_frame_size_received`, `max_frame_size_sent` and `settings_timeout` to configure HTTP/2 SETTINGS and related behavior. 
-* *2.4*: Add the experimental option `enable_connect_protocol`. +* *2.4*: Add the option `enable_connect_protocol`. * *2.0*: Protocol introduced. == See also diff --git a/doc/src/manual/cowboy_loop.asciidoc b/doc/src/manual/cowboy_loop.asciidoc index 000149d..8c9a816 100644 --- a/doc/src/manual/cowboy_loop.asciidoc +++ b/doc/src/manual/cowboy_loop.asciidoc @@ -28,11 +28,11 @@ Loop handlers implement the following interface: ---- init(Req, State) -> {cowboy_loop, Req, State} - | {cowboy_loop, Req, State, hibernate} + | {cowboy_loop, Req, State, hibernate | timeout()} info(Info, Req, State) -> {ok, Req, State} - | {ok, Req, State, hibernate} + | {ok, Req, State, hibernate | timeout()} | {stop, Req, State} terminate(Reason, Req, State) -> ok %% optional @@ -69,7 +69,9 @@ stop:: == Changelog -* *2.0*: Loop handlers no longer need to handle overflow/timeouts. +* *2.11*: A timeout may be returned instead of `hibernate`. + It functions the same way as the `gen_server` timeout. +* *2.0*: Loop handlers no longer need to handle socket events. * *1.0*: Behavior introduced. == See also diff --git a/doc/src/manual/cowboy_metrics_h.asciidoc b/doc/src/manual/cowboy_metrics_h.asciidoc index c871d57..801bdbb 100644 --- a/doc/src/manual/cowboy_metrics_h.asciidoc +++ b/doc/src/manual/cowboy_metrics_h.asciidoc @@ -160,5 +160,6 @@ The metrics stream handler does not produce any event. link:man:cowboy(7)[cowboy(7)], link:man:cowboy_stream(3)[cowboy_stream(3)], link:man:cowboy_compress_h(3)[cowboy_compress_h(3)], +link:man:cowboy_decompress_h(3)[cowboy_decompress_h(3)], link:man:cowboy_stream_h(3)[cowboy_stream_h(3)], link:man:cowboy_tracer_h(3)[cowboy_tracer_h(3)] diff --git a/doc/src/manual/cowboy_req.asciidoc b/doc/src/manual/cowboy_req.asciidoc index 0a1ca1b..0367836 100644 --- a/doc/src/manual/cowboy_req.asciidoc +++ b/doc/src/manual/cowboy_req.asciidoc @@ -120,8 +120,8 @@ request's URI. [source,erlang] ---- read_body_opts() :: #{ - length => non_neg_integer(), - period => non_neg_integer(), + length => non_neg_integer() | auto, + period => non_neg_integer() | infinity, timeout => timeout() } ---- @@ -130,6 +130,10 @@ Body reading options. The defaults are function-specific. +Auto mode can be enabled by setting `length` to `auto` +and `period` to `infinity`. The period cannot be set +to `infinity` when auto mode isn't used. + === req() [source,erlang] diff --git a/doc/src/manual/cowboy_req.cast.asciidoc b/doc/src/manual/cowboy_req.cast.asciidoc index d6e018f..b12157f 100644 --- a/doc/src/manual/cowboy_req.cast.asciidoc +++ b/doc/src/manual/cowboy_req.cast.asciidoc @@ -36,6 +36,22 @@ The atom `ok` is always returned. It can be safely ignored. == Examples +.Read the body using auto mode +[source,erlang] +---- +read_body_auto_async(Req) -> + read_body_auto_async(Req, make_ref(), <<>>). + +read_body_auto_async(Req, Ref, Acc) -> + cowboy_req:cast({read_body, self(), Ref, auto, infinity}, Req), + receive + {request_body, Ref, nofin, Data} -> + read_body_auto_async(Req, Ref, <<Acc/binary, Data/binary>>); + {request_body, Ref, fin, _BodyLen, Data} -> + {ok, <<Acc/binary, Data/binary>>, Req} + end. +---- + .Increase the HTTP/1.1 idle timeout [source,erlang] ---- diff --git a/doc/src/manual/cowboy_req.read_body.asciidoc b/doc/src/manual/cowboy_req.read_body.asciidoc index 2b87405..7da76ef 100644 --- a/doc/src/manual/cowboy_req.read_body.asciidoc +++ b/doc/src/manual/cowboy_req.read_body.asciidoc @@ -68,6 +68,13 @@ The `timeout` option is a safeguard in case the connection process becomes unresponsive. 
The function will crash if no message was received in that interval. The timeout should be larger than the period. It defaults to the period + 1 second. ++ +Auto mode can be enabled by setting the `length` to `auto` and +the `period` to `infinity`. When auto mode is used, Cowboy will +send data to the handler as soon as it receives it, regardless +of its size. It will wait indefinitely until data is available. +Auto mode's main purpose is asynchronous body reading using +link:man:cowboy_req:cast(3)[cowboy_req:cast(3)]. == Return value @@ -86,6 +93,9 @@ body has been read. == Changelog +* *2.11*: The `length` option now accepts `auto` and the + period now accepts `infinity`. This adds support for + reading the body in auto mode. * *2.0*: Function introduced. Replaces `body/1,2`. == Examples diff --git a/doc/src/manual/cowboy_req.set_resp_headers.asciidoc b/doc/src/manual/cowboy_req.set_resp_headers.asciidoc index 63fe424..9ca5901 100644 --- a/doc/src/manual/cowboy_req.set_resp_headers.asciidoc +++ b/doc/src/manual/cowboy_req.set_resp_headers.asciidoc @@ -11,7 +11,7 @@ cowboy_req:set_resp_headers - Set several response headers set_resp_headers(Headers, Req :: cowboy_req:req()) -> Req -Headers :: cowboy:http_headers() +Headers :: cowboy:http_headers() | [{binary(), iodata()}] ---- Set several headers to be sent with the response. @@ -32,8 +32,16 @@ instead of this function to set cookies. Headers:: -Headers as a map with keys being lowercase binary strings, -and values as binary strings. +Headers as a map with names being lowercase binary strings, +and values as iodata; or as a list with the same requirements +for names and values. ++ +When a list is given it is converted to its equivalent map, +with duplicate headers concatenated with a comma inserted +in-between. Support for lists is meant to simplify using +data from clients or other applications. ++ +The set-cookie header must not be set using this function. Req:: @@ -48,6 +56,7 @@ otherwise the headers will not be sent in the response. == Changelog +* *2.13*: The function now accepts a list of headers. * *2.0*: Function introduced. == Examples diff --git a/doc/src/manual/cowboy_rest.asciidoc b/doc/src/manual/cowboy_rest.asciidoc index a445948..fcef799 100644 --- a/doc/src/manual/cowboy_rest.asciidoc +++ b/doc/src/manual/cowboy_rest.asciidoc @@ -86,7 +86,10 @@ normal:: ---- AcceptCallback(Req, State) -> {Result, Req, State} -Result :: true | {true, URI :: iodata()} | false} +Result :: true + | {created, URI :: iodata()} + | {see_other, URI :: iodata()} + | false Default - crash ---- @@ -99,11 +102,14 @@ For PUT requests, the body is a representation of the resource that is being created or replaced. For POST requests, the body is typically application-specific -instructions on how to process the request, but it may also -be a representation of the resource. When creating a new -resource with POST at a different location, return `{true, URI}` +instructions on how to process the request, but it may also be a +representation of the resource. When creating a new resource with POST +at a different location, return `{created, URI}` or `{see_other, URI}` with `URI` the new location. +The `see_other` tuple will redirect the client to the new location +automatically. + For PATCH requests, the body is a series of instructions on how to update the resource. Patch files or JSON Patch are examples of such media types. @@ -373,7 +379,7 @@ and that the request shouldn't be repeated. 
---- generate_etag(Req, State) -> {Result, Req, State} -Result :: binary() | {weak | strong, binary()} +Result :: binary() | {weak | strong, binary()} | undefined Default - no etag value ---- @@ -383,6 +389,10 @@ When a binary is returned, the value is automatically parsed to a tuple. The binary must be in the same format as the etag header, including quotes. +It is possible to conditionally generate an etag. +When no etag can be generated, `undefined` should +be returned. + === is_authorized [source,erlang] ---- @@ -595,17 +605,139 @@ The response body can be provided either as the actual data to be sent or a tuple indicating which file to send. This function is called for both GET and HEAD requests. For -the latter the body is not sent, however. +the latter the body is not sent: it is only used to calculate +the content length. // @todo Perhaps we can optimize HEAD requests and just // allow calculating the length instead of returning the // whole thing. -Note that there used to be a way to stream the response body. -It was temporarily removed and will be added back in a later -release. +It is possible to stream the response body either by manually +sending the response and returning a `stop` value; or by +switching to a different handler (for example a loop handler) +and manually sending the response. All headers already set +by Cowboy will also be included in the response. + +=== RangeCallback + +[source,erlang] +---- +RangeCallback(Req, State) -> {Result, Req, State} + +Result :: [{Range, Body}] +Range :: {From, To, Total} | binary() +From :: non_neg_integer() +To :: non_neg_integer() +Total :: non_neg_integer() | '*' +Body :: cowboy_req:resp_body() +Default - crash +---- + +Return a list of ranges for the response body. + +The range selected can be found in the key `range` +in the Req object, as indicated in `range_satisfiable`. + +Instead of returning the full response body as would +be done in the `ProvideCallback`, a list of ranges +must be returned. There can be one or more ranges. +When one range is returned, a normal ranged response +is sent. When multiple ranges are returned, Cowboy +will automatically send a multipart/byteranges +response. + +When the total is not known the atom `'*'` can be +returned. + +=== ranges_provided + +[source,erlang] +---- +ranges_provided(Req, State) -> {Result, Req, State} + +Result :: [Range | Auto] +Range :: { + binary(), %% lowercase; case insensitive + RangeCallback :: atom() +} +Auto :: {<<"bytes">>, auto} +Default - skip this step +---- + +Return the list of range units the resource provides. + +During content negotiation Cowboy will build an accept-ranges +response header with the list of ranges provided. Cowboy +does not choose a range at this time; ranges are chosen +when it comes time to call the `ProvideCallback`. + +By default ranged requests will be handled the same as normal +requests: the `ProvideCallback` will be called and the full +response body will be sent. + +It is possible to let Cowboy handle ranged responses +automatically when the range unit is bytes and the +atom returned is `auto` (instead of a callback name). +In that case Cowboy will call the `ProvideCallback` +and split the response automatically, including by +producing a multipart/byteranges response if necessary. + +=== range_satisfiable + +[source,erlang] +---- +range_satisfiable(Req, State) -> {Result, Req, State} + +Result :: boolean() | {false, non_neg_integer() | iodata()} +Default :: true +---- + +Whether the range request is satisfiable.
+ +When the time comes to send the response body, and when +ranges have been provided via the `ranges_provided` +callback, Cowboy will process the if-range and the +range request headers and ensure it is satisfiable. + +This callback allows making resource-specific checks +before sending the ranged response. The default is +to accept sending a ranged response. + +Cowboy adds the requested `range` to the Req object +just before calling this callback: + +[source,erlang] +---- +req() :: #{ + range => { + binary(), %% lowercase; case insensitive + Range + } +} + +Range :: ByteRange | binary() + +ByteRange :: [{FirstByte, LastByte | infinity} | SuffixLen] +FirstByte :: non_neg_integer() +LastByte :: non_neg_integer() +SuffixLen :: neg_integer() +---- + +Only byte ranges are parsed. Other ranges are provided +as binary. Byte ranges may either be requested from first +to last bytes (inclusive); from first bytes to the end +(`infinity` is used to represent the last byte); or +the last bytes of the representation via a negative +integer (so -500 means the last 500 bytes). -// @todo Add a way to switch to loop handler for streaming the body. +Returning `false` will result in a 416 Range Not Satisfiable +response being sent. The content-range header will be +set automatically in the response if a tuple is +returned. The integer value represents the total +size (in the chosen unit) of the resource. An +iodata value may also be returned and will be +used as-is to build the content range header, +prepended with the unit chosen. === rate_limited [source,erlang] ---- rate_limited(Req, State) -> {Result, Req, State} Result :: false | {true, RetryAfter} RetryAfter :: non_neg_integer() | calendar:datetime() -Default - false +Default :: false ---- Return whether the user is rate limited. @@ -724,6 +856,14 @@ listed here, like the authorization header. == Changelog +* *2.11*: The `ranges_provided`, `range_satisfiable` and + the `RangeCallback` callbacks have been added. +* *2.11*: The `generate_etag` callback can now return + `undefined` to conditionally avoid generating + an etag. +* *2.9*: An `AcceptCallback` can now return `{created, URI}` or + `{see_other, URI}`. The return value `{true, URI}` + is deprecated. * *2.7*: The media type wildcard in `content_types_accepted` is now documented. * *2.6*: The callback `rate_limited` was added. diff --git a/doc/src/manual/cowboy_static.asciidoc b/doc/src/manual/cowboy_static.asciidoc index 0e131dd..dde3401 100644 --- a/doc/src/manual/cowboy_static.asciidoc +++ b/doc/src/manual/cowboy_static.asciidoc @@ -129,6 +129,8 @@ when it fails to detect a file's MIME type. == Changelog +* *2.11*: Support for range requests was added in 2.6 and + is now considered stable. * *2.6*: The `charset` extra option was added. * *1.0*: Handler introduced. diff --git a/doc/src/manual/cowboy_stream.asciidoc b/doc/src/manual/cowboy_stream.asciidoc index 65bd06e..25a9cf9 100644 --- a/doc/src/manual/cowboy_stream.asciidoc +++ b/doc/src/manual/cowboy_stream.asciidoc @@ -52,11 +52,11 @@ HTTP/1.1 will initialize a stream only when the request-line and all headers have been received. When errors occur before that point Cowboy will call the callback `early_error/5` with a partial request, the error reason and the response -Cowboy intends to send. All other events go throuh the +Cowboy intends to send. All other events go through the stream handler using the normal callbacks. HTTP/2 will initialize the stream when the `HEADERS` block has -been fully received and decoded.
Any protocol error occuring +been fully received and decoded. Any protocol error occurring before that will not result in a response being sent and will therefore not go through the stream handler. In addition Cowboy may terminate streams without sending an HTTP response @@ -84,6 +84,13 @@ the `early_error/5` callback must return a response command. // @todo The logger option and the {log, Level, Format, Args} // options need to be documented and tested. +The order in which the commands are given matters. For example, +when sending a response and at the same time creating a new child +process, the first command should be the `spawn` and the second the +`response`. The reason for that is that the sending of the response +may result in a socket error which leads to the termination of +the connection before the rest of the commands are executed. + The following commands are defined: [[inform_command]] @@ -236,6 +243,8 @@ will end successfully as far as the client is concerned. To indicate that an error occurred, either use `error_response` before stopping, or use `internal_error`. +No other command can be executed after the `stop` command. + === internal_error Stop the stream with an error. @@ -278,7 +287,7 @@ This can also be used to override stream handler options. For example this is supported by link:man:cowboy_compress_h(3)[cowboy_compress_h(3)]. -Not all options can be overriden. Please consult the +Not all options can be overridden. Please consult the relevant option's documentation for details. == Predefined events @@ -335,7 +344,7 @@ fin() :: fin | nofin ---- Used in commands and events to indicate that this is -the end of the stream. +the end of a direction of a stream. === partial_req() diff --git a/doc/src/manual/cowboy_stream.data.asciidoc b/doc/src/manual/cowboy_stream.data.asciidoc new file mode 100644 index 0000000..a0328f6 --- /dev/null +++ b/doc/src/manual/cowboy_stream.data.asciidoc @@ -0,0 +1,81 @@ += cowboy_stream:data(3) + +== Name + +cowboy_stream:data - Handle data for a stream + +== Description + +[source,erlang] +---- +data(StreamID, IsFin, Data, State) -> {Commands, State} + +StreamID :: cowboy_stream:stream_id() +IsFin :: cowboy_stream:fin() +Data :: binary() +Commands :: cowboy_stream:commands() +State - opaque +---- + +Handle data for a stream. + +This function should be called by all stream handlers. It will +propagate data to the next configured stream handler. Handlers +do not have to propagate data that has been fully handled. + +== Arguments + +StreamID:: + +The stream ID. + +IsFin:: + +Whether this is the end of the request body. + +Data:: + +The data received. + +Commands:: + +The commands to be executed. + +State:: + +The state for the next stream handler. + +== Return value + +A list of commands and an opaque state is returned. + +The list of commands returned should be included in the +commands returned from the current stream handler. It +can be modified if necessary. + +The state should be stored in the current stream +handler's state and passed to `cowboy_stream` when +necessary. The state should be treated as opaque. + +== Changelog + +* *2.0*: Function introduced. + +== Examples + +.Propagate data to the next stream handler +[source,erlang] +---- +data(StreamID, IsFin, Data, State=#state{next=Next0}) -> + MyCommands = my_commands(), + {Commands, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0), + {MyCommands ++ Commands, #state{next=Next}}. 
+---- + +== See also + +link:man:cowboy_stream(3)[cowboy_stream(3)], +link:man:cowboy_stream:init(3)[cowboy_stream:init(3)], +link:man:cowboy_stream:info(3)[cowboy_stream:info(3)], +link:man:cowboy_stream:terminate(3)[cowboy_stream:terminate(3)], +link:man:cowboy_stream:early_error(3)[cowboy_stream:early_error(3)] diff --git a/doc/src/manual/cowboy_stream.early_error.asciidoc b/doc/src/manual/cowboy_stream.early_error.asciidoc new file mode 100644 index 0000000..ad1d6bf --- /dev/null +++ b/doc/src/manual/cowboy_stream.early_error.asciidoc @@ -0,0 +1,73 @@ += cowboy_stream:early_error(3) + +== Name + +cowboy_stream:early_error - Handle an early error for a stream + +== Description + +[source,erlang] +---- +early_error(StreamID, Reason, PartialReq, Resp, Opts) -> Resp + +StreamID :: cowboy_stream:stream_id() +Reason :: cowboy_stream:reason() +PartialReq :: cowboy_stream:partial_req() +Resp :: cowboy_stream:resp_command() +Opts :: cowboy:opts() +---- + +Handle an early error for a stream. + +This function should be called by all stream handlers. It will +propagate the early error to the next configured stream handler. + +== Arguments + +StreamID:: + +The stream ID. + +Reason:: + +Reason for termination. + +PartialReq:: + +The request data that has been received so far. + +Resp:: + +The response that will be sent as a result of the early error. ++ +It may be modified by the stream handler before or after +being propagated to the next handler. + +Opts:: + +The protocol options. + +== Return value + +The response to be sent as a result of the early error. + +== Changelog + +* *2.0*: Function introduced. + +== Examples + +.Propagate the early error to the next stream handler +[source,erlang] +---- +early_error(StreamID, Reason, PartialReq, Resp, Opts) -> + cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts). +---- + +== See also + +link:man:cowboy_stream(3)[cowboy_stream(3)], +link:man:cowboy_stream:init(3)[cowboy_stream:init(3)], +link:man:cowboy_stream:data(3)[cowboy_stream:data(3)], +link:man:cowboy_stream:info(3)[cowboy_stream:info(3)], +link:man:cowboy_stream:terminate(3)[cowboy_stream:terminate(3)] diff --git a/doc/src/manual/cowboy_stream.info.asciidoc b/doc/src/manual/cowboy_stream.info.asciidoc new file mode 100644 index 0000000..32cbd85 --- /dev/null +++ b/doc/src/manual/cowboy_stream.info.asciidoc @@ -0,0 +1,77 @@ += cowboy_stream:info(3) + +== Name + +cowboy_stream:info - Handle a message for a stream + +== Description + +[source,erlang] +---- +info(StreamID, Info, State) -> {Commands, State} + +StreamID :: cowboy_stream:stream_id() +Info :: any() +Commands :: cowboy_stream:commands() +State - opaque +---- + +Handle a message for a stream. + +This function should be called by all stream handlers. It will +propagate the event to the next configured stream handler. +Handlers do not have to propagate events that have been +fully handled. + +== Arguments + +StreamID:: + +The stream ID. + +Info:: + +The event received. + +Commands:: + +The commands to be executed. + +State:: + +The state for the next stream handler. + +== Return value + +A list of commands and an opaque state is returned. + +The list of commands returned should be included in the +commands returned from the current stream handler. It +can be modified if necessary. + +The state should be stored in the current stream +handler's state and passed to `cowboy_stream` when +necessary. The state should be treated as opaque. + +== Changelog + +* *2.0*: Function introduced. 
+ +== Examples + +.Propagate an event to the next stream handler +[source,erlang] +---- +info(StreamID, Info, State=#state{next=Next0}) -> + MyCommands = my_commands(), + {Commands, Next} = cowboy_stream:info(StreamID, Info, Next0), + {MyCommands ++ Commands, #state{next=Next}}. +---- + +== See also + +link:man:cowboy_stream(3)[cowboy_stream(3)], +link:man:cowboy_stream:init(3)[cowboy_stream:init(3)], +link:man:cowboy_stream:data(3)[cowboy_stream:data(3)], +link:man:cowboy_stream:terminate(3)[cowboy_stream:terminate(3)], +link:man:cowboy_stream:early_error(3)[cowboy_stream:early_error(3)] diff --git a/doc/src/manual/cowboy_stream.init.asciidoc b/doc/src/manual/cowboy_stream.init.asciidoc new file mode 100644 index 0000000..addf9bc --- /dev/null +++ b/doc/src/manual/cowboy_stream.init.asciidoc @@ -0,0 +1,80 @@ += cowboy_stream:init(3) + +== Name + +cowboy_stream:init - Initialize a stream + +== Description + +[source,erlang] +---- +init(StreamID, Req, Opts) -> {Commands, State} + +StreamID :: cowboy_stream:stream_id() +Req :: cowboy_req:req() +Opts :: cowboy:opts() +Commands :: cowboy_stream:commands() +State - opaque +---- + +Initialize a stream. + +This function must be called by all stream handlers. It will +initialize the next configured stream handler. + +== Arguments + +StreamID:: + +The stream ID. + +Req:: + +The Req object. + +Opts:: + +The protocol options. + +Commands:: + +The commands to be executed. + +State:: + +The state for the next stream handler. + +== Return value + +A list of commands and an opaque state is returned. + +The list of commands returned should be included in the +commands returned from the current stream handler. It +can be modified if necessary. + +The state should be stored in the current stream +handler's state and passed to `cowboy_stream` when +necessary. The state should be treated as opaque. + +== Changelog + +* *2.0*: Function introduced. + +== Examples + +.Initialize the next stream handler +[source,erlang] +---- +init(StreamID, Req, Opts) -> + MyCommands = my_commands(), + {Commands, Next} = cowboy_stream:init(StreamID, Req, Opts), + {MyCommands ++ Commands, #state{next=Next}}. +---- + +== See also + +link:man:cowboy_stream(3)[cowboy_stream(3)], +link:man:cowboy_stream:data(3)[cowboy_stream:data(3)], +link:man:cowboy_stream:info(3)[cowboy_stream:info(3)], +link:man:cowboy_stream:terminate(3)[cowboy_stream:terminate(3)], +link:man:cowboy_stream:early_error(3)[cowboy_stream:early_error(3)] diff --git a/doc/src/manual/cowboy_stream.terminate.asciidoc b/doc/src/manual/cowboy_stream.terminate.asciidoc new file mode 100644 index 0000000..8393801 --- /dev/null +++ b/doc/src/manual/cowboy_stream.terminate.asciidoc @@ -0,0 +1,61 @@ += cowboy_stream:terminate(3) + +== Name + +cowboy_stream:terminate - Terminate a stream + +== Description + +[source,erlang] +---- +terminate(StreamID, Reason, State) -> ok + +StreamID :: cowboy_stream:stream_id() +Reason :: cowboy_stream:reason() +State - opaque +---- + +Terminate a stream. + +This function must be called by all stream handlers. It will +terminate the next configured stream handler. + +== Arguments + +StreamID:: + +The stream ID. + +Reason:: + +Reason for termination. + +State:: + +The state for the next stream handler. + +== Return value + +The atom `ok` is always returned. It can be safely ignored. + +== Changelog + +* *2.0*: Function introduced. 
+ +== Examples + +.Terminate the next stream handler +[source,erlang] +---- +terminate(StreamID, Reason, State=#state{next=Next0}) -> + my_termination(State), + cowboy_stream:terminate(StreamID, Reason, Next0). +---- + +== See also + +link:man:cowboy_stream(3)[cowboy_stream(3)], +link:man:cowboy_stream:init(3)[cowboy_stream:init(3)], +link:man:cowboy_stream:data(3)[cowboy_stream:data(3)], +link:man:cowboy_stream:info(3)[cowboy_stream:info(3)], +link:man:cowboy_stream:early_error(3)[cowboy_stream:early_error(3)] diff --git a/doc/src/manual/cowboy_stream_h.asciidoc b/doc/src/manual/cowboy_stream_h.asciidoc index c25aa3d..7e0af89 100644 --- a/doc/src/manual/cowboy_stream_h.asciidoc +++ b/doc/src/manual/cowboy_stream_h.asciidoc @@ -45,8 +45,49 @@ The default stream handler spawns the request process and receives its exit signal when it terminates. It will stop the stream once its receives it. -// @todo It also implements the read_body mechanism. -// Note that cowboy_stream_h sends the 100-continue automatically. +Because this stream handler converts events from the +request process into commands, other stream handlers +may not work properly if they are executed after the +default stream handler. Always be mindful of in which +order stream handlers will get executed. + +=== Request body + +The default stream handler implements the `read_body` +mechanism. In addition to reading the body, the handler +will automatically handle the `expect: 100-continue` +header and send a 100 Continue response. + +Normally one would use +link:man:cowboy_req:read_body(3)[cowboy_req:read_body(3)] +to read the request body. The default stream handler +will buffer data until the amount gets larger than the +requested length before sending it. Alternatively, it +will send whatever data it has when the period timeout +triggers. Depending on the protocol, the flow control +window is updated to allow receiving data for the +requested length. + +The default stream handler also comes with an automatic +mode for reading the request body. This can be used by +sending the event message `{read_body, Pid, Ref, auto, infinity}` +using link:man:cowboy_req:cast(3)[cowboy_req:cast(3)]. +The default stream handler will then send data as soon +as some becomes available using one of these two +messages depending on whether body reading was completed: + +* `{request_body, Ref, nofin, Data}` +* `{request_body, Ref, fin, BodyLen, Data}` + +Depending on the protocol, Cowboy will update the flow +control window using the size of the data that was read. + +Auto mode automatically gets disabled after data has +been sent to the handler. Therefore in order to continue +reading data a `read_body` event message must be sent +after each `request_body` message. + +=== Response In addition it returns a command for any event message looking like one of the following commands: `inform`, @@ -54,14 +95,9 @@ looking like one of the following commands: `inform`, `switch_protocol`. This is what allows the request process to send a response. -// @todo Add set_options, which updates options dynamically. - -Because this stream handler converts events from the -request process into commands, other stream handlers -may not work properly if they are executed - == Changelog +* *2.11*: Introduce body reading using auto mode. * *2.0*: Module introduced. 
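For comparison with the auto mode described above, a conventional read loop built on `cowboy_req:read_body/2` might look like the following sketch (the function name and the length/period values are arbitrary):

[source,erlang]
----
%% Illustrative sketch only: read the whole request body in chunks of
%% roughly 64KB, flushed at least once per second by the period option.
read_full_body(Req0) ->
    read_full_body(Req0, <<>>).

read_full_body(Req0, Acc) ->
    case cowboy_req:read_body(Req0, #{length => 64000, period => 1000}) of
        {more, Data, Req} ->
            read_full_body(Req, <<Acc/binary, Data/binary>>);
        {ok, Data, Req} ->
            {ok, <<Acc/binary, Data/binary>>, Req}
    end.
----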
== See also @@ -69,5 +105,7 @@ may not work properly if they are executed link:man:cowboy(7)[cowboy(7)], link:man:cowboy_stream(3)[cowboy_stream(3)], link:man:cowboy_compress_h(3)[cowboy_compress_h(3)], +link:man:cowboy_decompress_h(3)[cowboy_decompress_h(3)], link:man:cowboy_metrics_h(3)[cowboy_metrics_h(3)], -link:man:cowboy_tracer_h(3)[cowboy_tracer_h(3)] +link:man:cowboy_tracer_h(3)[cowboy_tracer_h(3)], +link:man:cowboy_req:cast(3)[cowboy_req:cast(3)] diff --git a/doc/src/manual/cowboy_tracer_h.asciidoc b/doc/src/manual/cowboy_tracer_h.asciidoc index e3592e4..4f4e9bc 100644 --- a/doc/src/manual/cowboy_tracer_h.asciidoc +++ b/doc/src/manual/cowboy_tracer_h.asciidoc @@ -84,5 +84,6 @@ The tracer stream handler does not produce any event. link:man:cowboy(7)[cowboy(7)], link:man:cowboy_stream(3)[cowboy_stream(3)], link:man:cowboy_compress_h(3)[cowboy_compress_h(3)], +link:man:cowboy_decompress_h(3)[cowboy_decompress_h(3)], link:man:cowboy_metrics_h(3)[cowboy_metrics_h(3)], link:man:cowboy_stream_h(3)[cowboy_stream_h(3)] diff --git a/doc/src/manual/cowboy_websocket.asciidoc b/doc/src/manual/cowboy_websocket.asciidoc index 5b1558c..319dbae 100644 --- a/doc/src/manual/cowboy_websocket.asciidoc +++ b/doc/src/manual/cowboy_websocket.asciidoc @@ -126,7 +126,7 @@ timeout:: received first. {error, Reason}:: - A socket error ocurred. + A socket error occurred. == Types @@ -138,7 +138,9 @@ commands() :: [Command] Command :: {active, boolean()} | {deflate, boolean()} - | {set_options, #{idle_timeout => timeout()}} + | {set_options, #{ + idle_timeout => timeout(), + max_frame_size => non_neg_integer() | infinity}} | {shutdown_reason, any()} | Frame :: cow_ws:frame() ---- @@ -159,8 +161,8 @@ effect on connections that did not negotiate compression. set_options:: -Set Websocket options. Currently only the option `idle_timeout` -may be updated from a Websocket handler. +Set Websocket options. Currently only the options `idle_timeout` +and `max_frame_size` may be updated from a Websocket handler. shutdown_reason:: @@ -201,6 +203,7 @@ opts() :: #{ active_n => pos_integer(), compress => boolean(), deflate_opts => cow_ws:deflate_opts() + dynamic_buffer => false | {pos_integer(), pos_integer()}, idle_timeout => timeout(), max_frame_size => non_neg_integer() | infinity, req_filter => fun((cowboy_req:req()) -> map()), @@ -222,7 +225,7 @@ init(Req, State) -> The default value is given next to the option name: -active_n (100):: +active_n (1):: The number of packets Cowboy will request from the socket at once. This can be used to tweak the performance of the server. Higher @@ -246,6 +249,17 @@ options and the zlib compression options. The defaults optimize the compression at the expense of some memory and CPU. +dynamic_buffer ({1024, 131072}):: + +Cowboy will dynamically change the socket's `buffer` size +depending on the size of the data it receives from the socket. +This lets Cowboy use the optimal buffer size for the current +workload. ++ +The dynamic buffer size functionality can be disabled by +setting this option to `false`. Cowboy will also disable +it by default when the `buffer` transport option is configured. + idle_timeout (60000):: Time in milliseconds that Cowboy will keep the @@ -285,6 +299,11 @@ normal circumstances if necessary. == Changelog +* *2.13*: The `active_n` default value was changed to `1`. +* *2.13*: The `dynamic_buffer` option was added. +* *2.13*: The `max_frame_size` option can now be set dynamically. +* *2.11*: Websocket over HTTP/2 is now considered stable. 
+* *2.11*: HTTP/1.1 Websocket no longer traps exits by default. * *2.8*: The `active_n` option was added. * *2.7*: The commands based interface has been documented. The old interface is now deprecated. diff --git a/ebin/cowboy.app b/ebin/cowboy.app index b44c951..39be200 100644 --- a/ebin/cowboy.app +++ b/ebin/cowboy.app @@ -1,9 +1,10 @@ {application, 'cowboy', [ {description, "Small, fast, modern HTTP server."}, - {vsn, "2.8.0"}, - {modules, ['cowboy','cowboy_app','cowboy_bstr','cowboy_children','cowboy_clear','cowboy_clock','cowboy_compress_h','cowboy_constraints','cowboy_handler','cowboy_http','cowboy_http2','cowboy_loop','cowboy_metrics_h','cowboy_middleware','cowboy_req','cowboy_rest','cowboy_router','cowboy_static','cowboy_stream','cowboy_stream_h','cowboy_sub_protocol','cowboy_sup','cowboy_tls','cowboy_tracer_h','cowboy_websocket']}, + {vsn, "2.13.0"}, + {modules, ['cowboy','cowboy_app','cowboy_bstr','cowboy_children','cowboy_clear','cowboy_clock','cowboy_compress_h','cowboy_constraints','cowboy_decompress_h','cowboy_handler','cowboy_http','cowboy_http2','cowboy_http3','cowboy_loop','cowboy_metrics_h','cowboy_middleware','cowboy_quicer','cowboy_req','cowboy_rest','cowboy_router','cowboy_static','cowboy_stream','cowboy_stream_h','cowboy_sub_protocol','cowboy_sup','cowboy_tls','cowboy_tracer_h','cowboy_websocket','cowboy_webtransport']}, {registered, [cowboy_sup,cowboy_clock]}, {applications, [kernel,stdlib,crypto,cowlib,ranch]}, + {optional_applications, []}, {mod, {cowboy_app, []}}, {env, []} ]}.
\ No newline at end of file @@ -17,7 +17,7 @@ ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) export ERLANG_MK_FILENAME -ERLANG_MK_VERSION = 472c405 +ERLANG_MK_VERSION = 2022.05.31-116-g0206e84-dirty ERLANG_MK_WITHOUT = # Make 3.81 and 3.82 are deprecated. @@ -47,7 +47,7 @@ verbose_0 = @ verbose_2 = set -x; verbose = $(verbose_$(V)) -ifeq ($(V),3) +ifeq ($V,3) SHELL := $(SHELL) -x endif @@ -66,7 +66,7 @@ export ERLANG_MK_TMP # "erl" command. -ERL = erl +A1 -noinput -boot no_dot_erlang +ERL = erl -noinput -boot no_dot_erlang -kernel start_distribution false +P 1024 +Q 1024 # Platform detection. @@ -162,7 +162,7 @@ define newline endef define comma_list -$(subst $(space),$(comma),$(strip $(1))) +$(subst $(space),$(comma),$(strip $1)) endef define escape_dquotes @@ -171,7 +171,7 @@ endef # Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. define erlang -$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk +$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar3/_build/prod/lib/*/ebin/ -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk endef ifeq ($(PLATFORM),msys2) @@ -180,22 +180,23 @@ else core_native_path = $1 endif -core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2 +core_http_get = curl -Lf$(if $(filter-out 0,$V),,s)o $(call core_native_path,$1) $2 -core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) +core_eq = $(and $(findstring $1,$2),$(findstring $2,$1)) # We skip files that contain spaces because they end up causing issues. -core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " ")) +# Files that begin with a dot are already ignored by the wildcard function. +core_find = $(foreach f,$(wildcard $(1:%/=%)/*),$(if $(wildcard $f/.),$(call core_find,$f,$2),$(if $(filter $(subst *,%,$2),$f),$(if $(wildcard $f),$f)))) -core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$1)))))))))))))))))))))))))) -core_ls = $(filter-out $(1),$(shell echo $(1))) +core_ls = $(filter-out $1,$(shell echo $1)) # @todo Use a solution that does not require using perl. core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) define core_render - printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) + printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $1)))))\n' > $2 endef # Automated update. @@ -245,19 +246,10 @@ KERL_MAKEFLAGS ?= OTP_GIT ?= https://github.com/erlang/otp define kerl_otp_target -$(KERL_INSTALL_DIR)/$(1): $(KERL) +$(KERL_INSTALL_DIR)/$1: $(KERL) $(verbose) if [ ! 
-d $$@ ]; then \ - MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \ - $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \ - fi -endef - -define kerl_hipe_target -$(KERL_INSTALL_DIR)/$1-native: $(KERL) - $(verbose) if [ ! -d $$@ ]; then \ - KERL_CONFIGURE_OPTIONS=--enable-native-libs \ - MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \ - $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \ + MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1; \ + $(KERL) install $1 $(KERL_INSTALL_DIR)/$1; \ fi endef @@ -283,10 +275,10 @@ ERLANG_OTP := $(notdir $(lastword $(sort\ endif ERLANG_OTP ?= -ERLANG_HIPE ?= # Use kerl to enforce a specific Erlang/OTP version for a project. ifneq ($(strip $(ERLANG_OTP)),) + export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH) SHELL := env PATH=$(PATH) $(SHELL) $(eval $(call kerl_otp_target,$(ERLANG_OTP))) @@ -297,101 +289,7 @@ $(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...) $(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2) endif -else -# Same for a HiPE enabled VM. -ifneq ($(strip $(ERLANG_HIPE)),) -export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH) -SHELL := env PATH=$(PATH) $(SHELL) -$(eval $(call kerl_hipe_target,$(ERLANG_HIPE))) - -# Build Erlang/OTP only if it doesn't already exist. -ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),) -$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...) -$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2) -endif - -endif -endif - -PACKAGES += aberth -pkg_aberth_name = aberth -pkg_aberth_description = Generic BERT-RPC server in Erlang -pkg_aberth_homepage = https://github.com/a13x/aberth -pkg_aberth_fetch = git -pkg_aberth_repo = https://github.com/a13x/aberth -pkg_aberth_commit = master - -PACKAGES += active -pkg_active_name = active -pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running -pkg_active_homepage = https://github.com/proger/active -pkg_active_fetch = git -pkg_active_repo = https://github.com/proger/active -pkg_active_commit = master - -PACKAGES += actordb_core -pkg_actordb_core_name = actordb_core -pkg_actordb_core_description = ActorDB main source -pkg_actordb_core_homepage = http://www.actordb.com/ -pkg_actordb_core_fetch = git -pkg_actordb_core_repo = https://github.com/biokoda/actordb_core -pkg_actordb_core_commit = master - -PACKAGES += actordb_thrift -pkg_actordb_thrift_name = actordb_thrift -pkg_actordb_thrift_description = Thrift API for ActorDB -pkg_actordb_thrift_homepage = http://www.actordb.com/ -pkg_actordb_thrift_fetch = git -pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift -pkg_actordb_thrift_commit = master - -PACKAGES += aleppo -pkg_aleppo_name = aleppo -pkg_aleppo_description = Alternative Erlang Pre-Processor -pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo -pkg_aleppo_fetch = git -pkg_aleppo_repo = https://github.com/ErlyORM/aleppo -pkg_aleppo_commit = master - -PACKAGES += alog -pkg_alog_name = alog -pkg_alog_description = Simply the best logging framework for Erlang -pkg_alog_homepage = https://github.com/siberian-fast-food/alogger -pkg_alog_fetch = git -pkg_alog_repo = https://github.com/siberian-fast-food/alogger -pkg_alog_commit = master - -PACKAGES += amqp_client -pkg_amqp_client_name = amqp_client -pkg_amqp_client_description = RabbitMQ Erlang 
AMQP client -pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html -pkg_amqp_client_fetch = git -pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git -pkg_amqp_client_commit = master - -PACKAGES += annotations -pkg_annotations_name = annotations -pkg_annotations_description = Simple code instrumentation utilities -pkg_annotations_homepage = https://github.com/hyperthunk/annotations -pkg_annotations_fetch = git -pkg_annotations_repo = https://github.com/hyperthunk/annotations -pkg_annotations_commit = master - -PACKAGES += antidote -pkg_antidote_name = antidote -pkg_antidote_description = Large-scale computation without synchronisation -pkg_antidote_homepage = https://syncfree.lip6.fr/ -pkg_antidote_fetch = git -pkg_antidote_repo = https://github.com/SyncFree/antidote -pkg_antidote_commit = master - -PACKAGES += apns -pkg_apns_name = apns -pkg_apns_description = Apple Push Notification Server for Erlang -pkg_apns_homepage = http://inaka.github.com/apns4erl -pkg_apns_fetch = git -pkg_apns_repo = https://github.com/inaka/apns4erl -pkg_apns_commit = master +endif PACKAGES += asciideck pkg_asciideck_name = asciideck @@ -401,525 +299,13 @@ pkg_asciideck_fetch = git pkg_asciideck_repo = https://github.com/ninenines/asciideck pkg_asciideck_commit = master -PACKAGES += azdht -pkg_azdht_name = azdht -pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang -pkg_azdht_homepage = https://github.com/arcusfelis/azdht -pkg_azdht_fetch = git -pkg_azdht_repo = https://github.com/arcusfelis/azdht -pkg_azdht_commit = master - -PACKAGES += backoff -pkg_backoff_name = backoff -pkg_backoff_description = Simple exponential backoffs in Erlang -pkg_backoff_homepage = https://github.com/ferd/backoff -pkg_backoff_fetch = git -pkg_backoff_repo = https://github.com/ferd/backoff -pkg_backoff_commit = master - -PACKAGES += barrel_tcp -pkg_barrel_tcp_name = barrel_tcp -pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. -pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp -pkg_barrel_tcp_fetch = git -pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp -pkg_barrel_tcp_commit = master - -PACKAGES += basho_bench -pkg_basho_bench_name = basho_bench -pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. 
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench -pkg_basho_bench_fetch = git -pkg_basho_bench_repo = https://github.com/basho/basho_bench -pkg_basho_bench_commit = master - -PACKAGES += bcrypt -pkg_bcrypt_name = bcrypt -pkg_bcrypt_description = Bcrypt Erlang / C library -pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt -pkg_bcrypt_fetch = git -pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git -pkg_bcrypt_commit = master - -PACKAGES += beam -pkg_beam_name = beam -pkg_beam_description = BEAM emulator written in Erlang -pkg_beam_homepage = https://github.com/tonyrog/beam -pkg_beam_fetch = git -pkg_beam_repo = https://github.com/tonyrog/beam -pkg_beam_commit = master - -PACKAGES += beanstalk -pkg_beanstalk_name = beanstalk -pkg_beanstalk_description = An Erlang client for beanstalkd -pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk -pkg_beanstalk_fetch = git -pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk -pkg_beanstalk_commit = master - -PACKAGES += bear -pkg_bear_name = bear -pkg_bear_description = a set of statistics functions for erlang -pkg_bear_homepage = https://github.com/boundary/bear -pkg_bear_fetch = git -pkg_bear_repo = https://github.com/boundary/bear -pkg_bear_commit = master - -PACKAGES += bertconf -pkg_bertconf_name = bertconf -pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded -pkg_bertconf_homepage = https://github.com/ferd/bertconf -pkg_bertconf_fetch = git -pkg_bertconf_repo = https://github.com/ferd/bertconf -pkg_bertconf_commit = master - -PACKAGES += bifrost -pkg_bifrost_name = bifrost -pkg_bifrost_description = Erlang FTP Server Framework -pkg_bifrost_homepage = https://github.com/thorstadt/bifrost -pkg_bifrost_fetch = git -pkg_bifrost_repo = https://github.com/thorstadt/bifrost -pkg_bifrost_commit = master - -PACKAGES += binpp -pkg_binpp_name = binpp -pkg_binpp_description = Erlang Binary Pretty Printer -pkg_binpp_homepage = https://github.com/jtendo/binpp -pkg_binpp_fetch = git -pkg_binpp_repo = https://github.com/jtendo/binpp -pkg_binpp_commit = master - -PACKAGES += bisect -pkg_bisect_name = bisect -pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang -pkg_bisect_homepage = https://github.com/knutin/bisect -pkg_bisect_fetch = git -pkg_bisect_repo = https://github.com/knutin/bisect -pkg_bisect_commit = master - -PACKAGES += bitcask -pkg_bitcask_name = bitcask -pkg_bitcask_description = because you need another a key/value storage engine -pkg_bitcask_homepage = https://github.com/basho/bitcask -pkg_bitcask_fetch = git -pkg_bitcask_repo = https://github.com/basho/bitcask -pkg_bitcask_commit = develop - -PACKAGES += bitstore -pkg_bitstore_name = bitstore -pkg_bitstore_description = A document based ontology development environment -pkg_bitstore_homepage = https://github.com/bdionne/bitstore -pkg_bitstore_fetch = git -pkg_bitstore_repo = https://github.com/bdionne/bitstore -pkg_bitstore_commit = master - -PACKAGES += bootstrap -pkg_bootstrap_name = bootstrap -pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap -pkg_bootstrap_fetch = git -pkg_bootstrap_repo = https://github.com/schlagert/bootstrap -pkg_bootstrap_commit = master - -PACKAGES += boss -pkg_boss_name = boss -pkg_boss_description = Erlang web MVC, now featuring Comet -pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss -pkg_boss_fetch = git -pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss -pkg_boss_commit = master - -PACKAGES += boss_db -pkg_boss_db_name = boss_db -pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang -pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db -pkg_boss_db_fetch = git -pkg_boss_db_repo = https://github.com/ErlyORM/boss_db -pkg_boss_db_commit = master - -PACKAGES += brod -pkg_brod_name = brod -pkg_brod_description = Kafka client in Erlang -pkg_brod_homepage = https://github.com/klarna/brod -pkg_brod_fetch = git -pkg_brod_repo = https://github.com/klarna/brod.git -pkg_brod_commit = master - -PACKAGES += bson -pkg_bson_name = bson -pkg_bson_description = BSON documents in Erlang, see bsonspec.org -pkg_bson_homepage = https://github.com/comtihon/bson-erlang -pkg_bson_fetch = git -pkg_bson_repo = https://github.com/comtihon/bson-erlang -pkg_bson_commit = master - -PACKAGES += bullet -pkg_bullet_name = bullet -pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. -pkg_bullet_homepage = http://ninenines.eu -pkg_bullet_fetch = git -pkg_bullet_repo = https://github.com/ninenines/bullet -pkg_bullet_commit = master - -PACKAGES += cache -pkg_cache_name = cache -pkg_cache_description = Erlang in-memory cache -pkg_cache_homepage = https://github.com/fogfish/cache -pkg_cache_fetch = git -pkg_cache_repo = https://github.com/fogfish/cache -pkg_cache_commit = master - -PACKAGES += cake -pkg_cake_name = cake -pkg_cake_description = Really simple terminal colorization -pkg_cake_homepage = https://github.com/darach/cake-erl -pkg_cake_fetch = git -pkg_cake_repo = https://github.com/darach/cake-erl -pkg_cake_commit = master - -PACKAGES += carotene -pkg_carotene_name = carotene -pkg_carotene_description = Real-time server -pkg_carotene_homepage = https://github.com/carotene/carotene -pkg_carotene_fetch = git -pkg_carotene_repo = https://github.com/carotene/carotene -pkg_carotene_commit = master - -PACKAGES += cberl -pkg_cberl_name = cberl -pkg_cberl_description = NIF based Erlang bindings for Couchbase -pkg_cberl_homepage = https://github.com/chitika/cberl -pkg_cberl_fetch = git -pkg_cberl_repo = https://github.com/chitika/cberl -pkg_cberl_commit = master - -PACKAGES += cecho -pkg_cecho_name = cecho -pkg_cecho_description = An ncurses library for Erlang -pkg_cecho_homepage = https://github.com/mazenharake/cecho -pkg_cecho_fetch = git -pkg_cecho_repo = https://github.com/mazenharake/cecho -pkg_cecho_commit = master - -PACKAGES += cferl -pkg_cferl_name = cferl -pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client -pkg_cferl_homepage = https://github.com/ddossot/cferl -pkg_cferl_fetch = git -pkg_cferl_repo = https://github.com/ddossot/cferl -pkg_cferl_commit = master - -PACKAGES += chaos_monkey -pkg_chaos_monkey_name = chaos_monkey -pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey -pkg_chaos_monkey_fetch = git -pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey -pkg_chaos_monkey_commit = master - -PACKAGES += check_node -pkg_check_node_name = check_node -pkg_check_node_description = Nagios Scripts for monitoring Riak -pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios -pkg_check_node_fetch = git -pkg_check_node_repo = https://github.com/basho-labs/riak_nagios -pkg_check_node_commit = master - -PACKAGES += chronos -pkg_chronos_name = chronos -pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. -pkg_chronos_homepage = https://github.com/lehoff/chronos -pkg_chronos_fetch = git -pkg_chronos_repo = https://github.com/lehoff/chronos -pkg_chronos_commit = master - -PACKAGES += chumak -pkg_chumak_name = chumak -pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol. -pkg_chumak_homepage = http://choven.ca -pkg_chumak_fetch = git -pkg_chumak_repo = https://github.com/chovencorp/chumak -pkg_chumak_commit = master - -PACKAGES += cl -pkg_cl_name = cl -pkg_cl_description = OpenCL binding for Erlang -pkg_cl_homepage = https://github.com/tonyrog/cl -pkg_cl_fetch = git -pkg_cl_repo = https://github.com/tonyrog/cl -pkg_cl_commit = master - -PACKAGES += clique -pkg_clique_name = clique -pkg_clique_description = CLI Framework for Erlang -pkg_clique_homepage = https://github.com/basho/clique -pkg_clique_fetch = git -pkg_clique_repo = https://github.com/basho/clique -pkg_clique_commit = develop - -PACKAGES += cloudi_core -pkg_cloudi_core_name = cloudi_core -pkg_cloudi_core_description = CloudI internal service runtime -pkg_cloudi_core_homepage = http://cloudi.org/ -pkg_cloudi_core_fetch = git -pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core -pkg_cloudi_core_commit = master - -PACKAGES += cloudi_service_api_requests -pkg_cloudi_service_api_requests_name = cloudi_service_api_requests -pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) -pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ -pkg_cloudi_service_api_requests_fetch = git -pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests -pkg_cloudi_service_api_requests_commit = master - -PACKAGES += cloudi_service_db -pkg_cloudi_service_db_name = cloudi_service_db -pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) -pkg_cloudi_service_db_homepage = http://cloudi.org/ -pkg_cloudi_service_db_fetch = git -pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db -pkg_cloudi_service_db_commit = master - -PACKAGES += cloudi_service_db_cassandra -pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra -pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service -pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ -pkg_cloudi_service_db_cassandra_fetch = git -pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra -pkg_cloudi_service_db_cassandra_commit = master - -PACKAGES += cloudi_service_db_cassandra_cql -pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql -pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service -pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ -pkg_cloudi_service_db_cassandra_cql_fetch = git -pkg_cloudi_service_db_cassandra_cql_repo = 
https://github.com/CloudI/cloudi_service_db_cassandra_cql -pkg_cloudi_service_db_cassandra_cql_commit = master - -PACKAGES += cloudi_service_db_couchdb -pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb -pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service -pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ -pkg_cloudi_service_db_couchdb_fetch = git -pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb -pkg_cloudi_service_db_couchdb_commit = master - -PACKAGES += cloudi_service_db_elasticsearch -pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch -pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service -pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ -pkg_cloudi_service_db_elasticsearch_fetch = git -pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch -pkg_cloudi_service_db_elasticsearch_commit = master - -PACKAGES += cloudi_service_db_memcached -pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached -pkg_cloudi_service_db_memcached_description = memcached CloudI Service -pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ -pkg_cloudi_service_db_memcached_fetch = git -pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached -pkg_cloudi_service_db_memcached_commit = master - -PACKAGES += cloudi_service_db_mysql -pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql -pkg_cloudi_service_db_mysql_description = MySQL CloudI Service -pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ -pkg_cloudi_service_db_mysql_fetch = git -pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql -pkg_cloudi_service_db_mysql_commit = master - -PACKAGES += cloudi_service_db_pgsql -pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql -pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service -pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ -pkg_cloudi_service_db_pgsql_fetch = git -pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql -pkg_cloudi_service_db_pgsql_commit = master - -PACKAGES += cloudi_service_db_riak -pkg_cloudi_service_db_riak_name = cloudi_service_db_riak -pkg_cloudi_service_db_riak_description = Riak CloudI Service -pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ -pkg_cloudi_service_db_riak_fetch = git -pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak -pkg_cloudi_service_db_riak_commit = master - -PACKAGES += cloudi_service_db_tokyotyrant -pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant -pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service -pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ -pkg_cloudi_service_db_tokyotyrant_fetch = git -pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant -pkg_cloudi_service_db_tokyotyrant_commit = master - -PACKAGES += cloudi_service_filesystem -pkg_cloudi_service_filesystem_name = cloudi_service_filesystem -pkg_cloudi_service_filesystem_description = Filesystem CloudI Service -pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ -pkg_cloudi_service_filesystem_fetch = git -pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem -pkg_cloudi_service_filesystem_commit = master - -PACKAGES += cloudi_service_http_client 
-pkg_cloudi_service_http_client_name = cloudi_service_http_client -pkg_cloudi_service_http_client_description = HTTP client CloudI Service -pkg_cloudi_service_http_client_homepage = http://cloudi.org/ -pkg_cloudi_service_http_client_fetch = git -pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client -pkg_cloudi_service_http_client_commit = master - -PACKAGES += cloudi_service_http_cowboy -pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy -pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service -pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ -pkg_cloudi_service_http_cowboy_fetch = git -pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy -pkg_cloudi_service_http_cowboy_commit = master - -PACKAGES += cloudi_service_http_elli -pkg_cloudi_service_http_elli_name = cloudi_service_http_elli -pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service -pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ -pkg_cloudi_service_http_elli_fetch = git -pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli -pkg_cloudi_service_http_elli_commit = master - -PACKAGES += cloudi_service_map_reduce -pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce -pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service -pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ -pkg_cloudi_service_map_reduce_fetch = git -pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce -pkg_cloudi_service_map_reduce_commit = master - -PACKAGES += cloudi_service_oauth1 -pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 -pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service -pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ -pkg_cloudi_service_oauth1_fetch = git -pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 -pkg_cloudi_service_oauth1_commit = master - -PACKAGES += cloudi_service_queue -pkg_cloudi_service_queue_name = cloudi_service_queue -pkg_cloudi_service_queue_description = Persistent Queue Service -pkg_cloudi_service_queue_homepage = http://cloudi.org/ -pkg_cloudi_service_queue_fetch = git -pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue -pkg_cloudi_service_queue_commit = master - -PACKAGES += cloudi_service_quorum -pkg_cloudi_service_quorum_name = cloudi_service_quorum -pkg_cloudi_service_quorum_description = CloudI Quorum Service -pkg_cloudi_service_quorum_homepage = http://cloudi.org/ -pkg_cloudi_service_quorum_fetch = git -pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum -pkg_cloudi_service_quorum_commit = master - -PACKAGES += cloudi_service_router -pkg_cloudi_service_router_name = cloudi_service_router -pkg_cloudi_service_router_description = CloudI Router Service -pkg_cloudi_service_router_homepage = http://cloudi.org/ -pkg_cloudi_service_router_fetch = git -pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router -pkg_cloudi_service_router_commit = master - -PACKAGES += cloudi_service_tcp -pkg_cloudi_service_tcp_name = cloudi_service_tcp -pkg_cloudi_service_tcp_description = TCP CloudI Service -pkg_cloudi_service_tcp_homepage = http://cloudi.org/ -pkg_cloudi_service_tcp_fetch = git -pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp -pkg_cloudi_service_tcp_commit = master - -PACKAGES += cloudi_service_timers 
-pkg_cloudi_service_timers_name = cloudi_service_timers -pkg_cloudi_service_timers_description = Timers CloudI Service -pkg_cloudi_service_timers_homepage = http://cloudi.org/ -pkg_cloudi_service_timers_fetch = git -pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers -pkg_cloudi_service_timers_commit = master - -PACKAGES += cloudi_service_udp -pkg_cloudi_service_udp_name = cloudi_service_udp -pkg_cloudi_service_udp_description = UDP CloudI Service -pkg_cloudi_service_udp_homepage = http://cloudi.org/ -pkg_cloudi_service_udp_fetch = git -pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp -pkg_cloudi_service_udp_commit = master - -PACKAGES += cloudi_service_validate -pkg_cloudi_service_validate_name = cloudi_service_validate -pkg_cloudi_service_validate_description = CloudI Validate Service -pkg_cloudi_service_validate_homepage = http://cloudi.org/ -pkg_cloudi_service_validate_fetch = git -pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate -pkg_cloudi_service_validate_commit = master - -PACKAGES += cloudi_service_zeromq -pkg_cloudi_service_zeromq_name = cloudi_service_zeromq -pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service -pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ -pkg_cloudi_service_zeromq_fetch = git -pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq -pkg_cloudi_service_zeromq_commit = master - -PACKAGES += cluster_info -pkg_cluster_info_name = cluster_info -pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app -pkg_cluster_info_homepage = https://github.com/basho/cluster_info -pkg_cluster_info_fetch = git -pkg_cluster_info_repo = https://github.com/basho/cluster_info -pkg_cluster_info_commit = master - -PACKAGES += color -pkg_color_name = color -pkg_color_description = ANSI colors for your Erlang -pkg_color_homepage = https://github.com/julianduque/erlang-color -pkg_color_fetch = git -pkg_color_repo = https://github.com/julianduque/erlang-color -pkg_color_commit = master - -PACKAGES += confetti -pkg_confetti_name = confetti -pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids -pkg_confetti_homepage = https://github.com/jtendo/confetti -pkg_confetti_fetch = git -pkg_confetti_repo = https://github.com/jtendo/confetti -pkg_confetti_commit = master - -PACKAGES += couchbeam -pkg_couchbeam_name = couchbeam -pkg_couchbeam_description = Apache CouchDB client in Erlang -pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam -pkg_couchbeam_fetch = git -pkg_couchbeam_repo = https://github.com/benoitc/couchbeam -pkg_couchbeam_commit = master - -PACKAGES += covertool -pkg_covertool_name = covertool -pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports -pkg_covertool_homepage = https://github.com/idubrov/covertool -pkg_covertool_fetch = git -pkg_covertool_repo = https://github.com/idubrov/covertool -pkg_covertool_commit = master - PACKAGES += cowboy pkg_cowboy_name = cowboy pkg_cowboy_description = Small, fast and modular HTTP server. 
pkg_cowboy_homepage = http://ninenines.eu pkg_cowboy_fetch = git pkg_cowboy_repo = https://github.com/ninenines/cowboy -pkg_cowboy_commit = 1.0.4 - -PACKAGES += cowdb -pkg_cowdb_name = cowdb -pkg_cowdb_description = Pure Key/Value database library for Erlang Applications -pkg_cowdb_homepage = https://github.com/refuge/cowdb -pkg_cowdb_fetch = git -pkg_cowdb_repo = https://github.com/refuge/cowdb -pkg_cowdb_commit = master +pkg_cowboy_commit = master PACKAGES += cowlib pkg_cowlib_name = cowlib @@ -927,671 +313,7 @@ pkg_cowlib_description = Support library for manipulating Web protocols. pkg_cowlib_homepage = http://ninenines.eu pkg_cowlib_fetch = git pkg_cowlib_repo = https://github.com/ninenines/cowlib -pkg_cowlib_commit = 1.0.2 - -PACKAGES += cpg -pkg_cpg_name = cpg -pkg_cpg_description = CloudI Process Groups -pkg_cpg_homepage = https://github.com/okeuday/cpg -pkg_cpg_fetch = git -pkg_cpg_repo = https://github.com/okeuday/cpg -pkg_cpg_commit = master - -PACKAGES += cqerl -pkg_cqerl_name = cqerl -pkg_cqerl_description = Native Erlang CQL client for Cassandra -pkg_cqerl_homepage = https://matehat.github.io/cqerl/ -pkg_cqerl_fetch = git -pkg_cqerl_repo = https://github.com/matehat/cqerl -pkg_cqerl_commit = master - -PACKAGES += cr -pkg_cr_name = cr -pkg_cr_description = Chain Replication -pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm -pkg_cr_fetch = git -pkg_cr_repo = https://github.com/spawnproc/cr -pkg_cr_commit = master - -PACKAGES += cuttlefish -pkg_cuttlefish_name = cuttlefish -pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? -pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish -pkg_cuttlefish_fetch = git -pkg_cuttlefish_repo = https://github.com/basho/cuttlefish -pkg_cuttlefish_commit = master - -PACKAGES += damocles -pkg_damocles_name = damocles -pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
-pkg_damocles_homepage = https://github.com/lostcolony/damocles -pkg_damocles_fetch = git -pkg_damocles_repo = https://github.com/lostcolony/damocles -pkg_damocles_commit = master - -PACKAGES += debbie -pkg_debbie_name = debbie -pkg_debbie_description = .DEB Built In Erlang -pkg_debbie_homepage = https://github.com/crownedgrouse/debbie -pkg_debbie_fetch = git -pkg_debbie_repo = https://github.com/crownedgrouse/debbie -pkg_debbie_commit = master - -PACKAGES += decimal -pkg_decimal_name = decimal -pkg_decimal_description = An Erlang decimal arithmetic library -pkg_decimal_homepage = https://github.com/tim/erlang-decimal -pkg_decimal_fetch = git -pkg_decimal_repo = https://github.com/tim/erlang-decimal -pkg_decimal_commit = master - -PACKAGES += detergent -pkg_detergent_name = detergent -pkg_detergent_description = An emulsifying Erlang SOAP library -pkg_detergent_homepage = https://github.com/devinus/detergent -pkg_detergent_fetch = git -pkg_detergent_repo = https://github.com/devinus/detergent -pkg_detergent_commit = master - -PACKAGES += detest -pkg_detest_name = detest -pkg_detest_description = Tool for running tests on a cluster of erlang nodes -pkg_detest_homepage = https://github.com/biokoda/detest -pkg_detest_fetch = git -pkg_detest_repo = https://github.com/biokoda/detest -pkg_detest_commit = master - -PACKAGES += dh_date -pkg_dh_date_name = dh_date -pkg_dh_date_description = Date formatting / parsing library for erlang -pkg_dh_date_homepage = https://github.com/daleharvey/dh_date -pkg_dh_date_fetch = git -pkg_dh_date_repo = https://github.com/daleharvey/dh_date -pkg_dh_date_commit = master - -PACKAGES += dirbusterl -pkg_dirbusterl_name = dirbusterl -pkg_dirbusterl_description = DirBuster successor in Erlang -pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl -pkg_dirbusterl_fetch = git -pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl -pkg_dirbusterl_commit = master - -PACKAGES += dispcount -pkg_dispcount_name = dispcount -pkg_dispcount_description = Erlang task dispatcher based on ETS counters. 
-pkg_dispcount_homepage = https://github.com/ferd/dispcount -pkg_dispcount_fetch = git -pkg_dispcount_repo = https://github.com/ferd/dispcount -pkg_dispcount_commit = master - -PACKAGES += dlhttpc -pkg_dlhttpc_name = dlhttpc -pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints -pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc -pkg_dlhttpc_fetch = git -pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc -pkg_dlhttpc_commit = master - -PACKAGES += dns -pkg_dns_name = dns -pkg_dns_description = Erlang DNS library -pkg_dns_homepage = https://github.com/aetrion/dns_erlang -pkg_dns_fetch = git -pkg_dns_repo = https://github.com/aetrion/dns_erlang -pkg_dns_commit = master - -PACKAGES += dnssd -pkg_dnssd_name = dnssd -pkg_dnssd_description = Erlang interface to Apple's Bonjour D NS Service Discovery implementation -pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang -pkg_dnssd_fetch = git -pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang -pkg_dnssd_commit = master - -PACKAGES += dynamic_compile -pkg_dynamic_compile_name = dynamic_compile -pkg_dynamic_compile_description = compile and load erlang modules from string input -pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile -pkg_dynamic_compile_fetch = git -pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile -pkg_dynamic_compile_commit = master - -PACKAGES += e2 -pkg_e2_name = e2 -pkg_e2_description = Library to simply writing correct OTP applications. -pkg_e2_homepage = http://e2project.org -pkg_e2_fetch = git -pkg_e2_repo = https://github.com/gar1t/e2 -pkg_e2_commit = master - -PACKAGES += eamf -pkg_eamf_name = eamf -pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang -pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf -pkg_eamf_fetch = git -pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf -pkg_eamf_commit = master - -PACKAGES += eavro -pkg_eavro_name = eavro -pkg_eavro_description = Apache Avro encoder/decoder -pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro -pkg_eavro_fetch = git -pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro -pkg_eavro_commit = master - -PACKAGES += ecapnp -pkg_ecapnp_name = ecapnp -pkg_ecapnp_description = Cap'n Proto library for Erlang -pkg_ecapnp_homepage = https://github.com/kaos/ecapnp -pkg_ecapnp_fetch = git -pkg_ecapnp_repo = https://github.com/kaos/ecapnp -pkg_ecapnp_commit = master - -PACKAGES += econfig -pkg_econfig_name = econfig -pkg_econfig_description = simple Erlang config handler using INI files -pkg_econfig_homepage = https://github.com/benoitc/econfig -pkg_econfig_fetch = git -pkg_econfig_repo = https://github.com/benoitc/econfig -pkg_econfig_commit = master - -PACKAGES += edate -pkg_edate_name = edate -pkg_edate_description = date manipulation library for erlang -pkg_edate_homepage = https://github.com/dweldon/edate -pkg_edate_fetch = git -pkg_edate_repo = https://github.com/dweldon/edate -pkg_edate_commit = master - -PACKAGES += edgar -pkg_edgar_name = edgar -pkg_edgar_description = Erlang Does GNU AR -pkg_edgar_homepage = https://github.com/crownedgrouse/edgar -pkg_edgar_fetch = git -pkg_edgar_repo = https://github.com/crownedgrouse/edgar -pkg_edgar_commit = master - -PACKAGES += edis -pkg_edis_name = edis -pkg_edis_description = An Erlang implementation of Redis KV Store -pkg_edis_homepage = http://inaka.github.com/edis/ -pkg_edis_fetch = git -pkg_edis_repo = https://github.com/inaka/edis -pkg_edis_commit = master - -PACKAGES += edns 
-pkg_edns_name = edns -pkg_edns_description = Erlang/OTP DNS server -pkg_edns_homepage = https://github.com/hcvst/erlang-dns -pkg_edns_fetch = git -pkg_edns_repo = https://github.com/hcvst/erlang-dns -pkg_edns_commit = master - -PACKAGES += edown -pkg_edown_name = edown -pkg_edown_description = EDoc extension for generating Github-flavored Markdown -pkg_edown_homepage = https://github.com/uwiger/edown -pkg_edown_fetch = git -pkg_edown_repo = https://github.com/uwiger/edown -pkg_edown_commit = master - -PACKAGES += eep -pkg_eep_name = eep -pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy -pkg_eep_homepage = https://github.com/virtan/eep -pkg_eep_fetch = git -pkg_eep_repo = https://github.com/virtan/eep -pkg_eep_commit = master - -PACKAGES += eep_app -pkg_eep_app_name = eep_app -pkg_eep_app_description = Embedded Event Processing -pkg_eep_app_homepage = https://github.com/darach/eep-erl -pkg_eep_app_fetch = git -pkg_eep_app_repo = https://github.com/darach/eep-erl -pkg_eep_app_commit = master - -PACKAGES += efene -pkg_efene_name = efene -pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX -pkg_efene_homepage = https://github.com/efene/efene -pkg_efene_fetch = git -pkg_efene_repo = https://github.com/efene/efene -pkg_efene_commit = master - -PACKAGES += egeoip -pkg_egeoip_name = egeoip -pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. -pkg_egeoip_homepage = https://github.com/mochi/egeoip -pkg_egeoip_fetch = git -pkg_egeoip_repo = https://github.com/mochi/egeoip -pkg_egeoip_commit = master - -PACKAGES += ehsa -pkg_ehsa_name = ehsa -pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules -pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa -pkg_ehsa_fetch = hg -pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa -pkg_ehsa_commit = default - -PACKAGES += ej -pkg_ej_name = ej -pkg_ej_description = Helper module for working with Erlang terms representing JSON -pkg_ej_homepage = https://github.com/seth/ej -pkg_ej_fetch = git -pkg_ej_repo = https://github.com/seth/ej -pkg_ej_commit = master - -PACKAGES += ejabberd -pkg_ejabberd_name = ejabberd -pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform -pkg_ejabberd_homepage = https://github.com/processone/ejabberd -pkg_ejabberd_fetch = git -pkg_ejabberd_repo = https://github.com/processone/ejabberd -pkg_ejabberd_commit = master - -PACKAGES += ejwt -pkg_ejwt_name = ejwt -pkg_ejwt_description = erlang library for JSON Web Token -pkg_ejwt_homepage = https://github.com/artefactop/ejwt -pkg_ejwt_fetch = git -pkg_ejwt_repo = https://github.com/artefactop/ejwt -pkg_ejwt_commit = master - -PACKAGES += ekaf -pkg_ekaf_name = ekaf -pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. -pkg_ekaf_homepage = https://github.com/helpshift/ekaf -pkg_ekaf_fetch = git -pkg_ekaf_repo = https://github.com/helpshift/ekaf -pkg_ekaf_commit = master - -PACKAGES += elarm -pkg_elarm_name = elarm -pkg_elarm_description = Alarm Manager for Erlang. 
-pkg_elarm_homepage = https://github.com/esl/elarm -pkg_elarm_fetch = git -pkg_elarm_repo = https://github.com/esl/elarm -pkg_elarm_commit = master - -PACKAGES += eleveldb -pkg_eleveldb_name = eleveldb -pkg_eleveldb_description = Erlang LevelDB API -pkg_eleveldb_homepage = https://github.com/basho/eleveldb -pkg_eleveldb_fetch = git -pkg_eleveldb_repo = https://github.com/basho/eleveldb -pkg_eleveldb_commit = master - -PACKAGES += elixir -pkg_elixir_name = elixir -pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications -pkg_elixir_homepage = https://elixir-lang.org/ -pkg_elixir_fetch = git -pkg_elixir_repo = https://github.com/elixir-lang/elixir -pkg_elixir_commit = master - -PACKAGES += elli -pkg_elli_name = elli -pkg_elli_description = Simple, robust and performant Erlang web server -pkg_elli_homepage = https://github.com/elli-lib/elli -pkg_elli_fetch = git -pkg_elli_repo = https://github.com/elli-lib/elli -pkg_elli_commit = master - -PACKAGES += elvis -pkg_elvis_name = elvis -pkg_elvis_description = Erlang Style Reviewer -pkg_elvis_homepage = https://github.com/inaka/elvis -pkg_elvis_fetch = git -pkg_elvis_repo = https://github.com/inaka/elvis -pkg_elvis_commit = master - -PACKAGES += emagick -pkg_emagick_name = emagick -pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. -pkg_emagick_homepage = https://github.com/kivra/emagick -pkg_emagick_fetch = git -pkg_emagick_repo = https://github.com/kivra/emagick -pkg_emagick_commit = master - -PACKAGES += emysql -pkg_emysql_name = emysql -pkg_emysql_description = Stable, pure Erlang MySQL driver. -pkg_emysql_homepage = https://github.com/Eonblast/Emysql -pkg_emysql_fetch = git -pkg_emysql_repo = https://github.com/Eonblast/Emysql -pkg_emysql_commit = master - -PACKAGES += enm -pkg_enm_name = enm -pkg_enm_description = Erlang driver for nanomsg -pkg_enm_homepage = https://github.com/basho/enm -pkg_enm_fetch = git -pkg_enm_repo = https://github.com/basho/enm -pkg_enm_commit = master - -PACKAGES += entop -pkg_entop_name = entop -pkg_entop_description = A top-like tool for monitoring an Erlang node -pkg_entop_homepage = https://github.com/mazenharake/entop -pkg_entop_fetch = git -pkg_entop_repo = https://github.com/mazenharake/entop -pkg_entop_commit = master - -PACKAGES += epcap -pkg_epcap_name = epcap -pkg_epcap_description = Erlang packet capture interface using pcap -pkg_epcap_homepage = https://github.com/msantos/epcap -pkg_epcap_fetch = git -pkg_epcap_repo = https://github.com/msantos/epcap -pkg_epcap_commit = master - -PACKAGES += eper -pkg_eper_name = eper -pkg_eper_description = Erlang performance and debugging tools. -pkg_eper_homepage = https://github.com/massemanet/eper -pkg_eper_fetch = git -pkg_eper_repo = https://github.com/massemanet/eper -pkg_eper_commit = master - -PACKAGES += epgsql -pkg_epgsql_name = epgsql -pkg_epgsql_description = Erlang PostgreSQL client library. -pkg_epgsql_homepage = https://github.com/epgsql/epgsql -pkg_epgsql_fetch = git -pkg_epgsql_repo = https://github.com/epgsql/epgsql -pkg_epgsql_commit = master - -PACKAGES += episcina -pkg_episcina_name = episcina -pkg_episcina_description = A simple non intrusive resource pool for connections -pkg_episcina_homepage = https://github.com/erlware/episcina -pkg_episcina_fetch = git -pkg_episcina_repo = https://github.com/erlware/episcina -pkg_episcina_commit = master - -PACKAGES += eplot -pkg_eplot_name = eplot -pkg_eplot_description = A plot engine written in erlang. 
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot -pkg_eplot_fetch = git -pkg_eplot_repo = https://github.com/psyeugenic/eplot -pkg_eplot_commit = master - -PACKAGES += epocxy -pkg_epocxy_name = epocxy -pkg_epocxy_description = Erlang Patterns of Concurrency -pkg_epocxy_homepage = https://github.com/duomark/epocxy -pkg_epocxy_fetch = git -pkg_epocxy_repo = https://github.com/duomark/epocxy -pkg_epocxy_commit = master - -PACKAGES += epubnub -pkg_epubnub_name = epubnub -pkg_epubnub_description = Erlang PubNub API -pkg_epubnub_homepage = https://github.com/tsloughter/epubnub -pkg_epubnub_fetch = git -pkg_epubnub_repo = https://github.com/tsloughter/epubnub -pkg_epubnub_commit = master - -PACKAGES += eqm -pkg_eqm_name = eqm -pkg_eqm_description = Erlang pub sub with supply-demand channels -pkg_eqm_homepage = https://github.com/loucash/eqm -pkg_eqm_fetch = git -pkg_eqm_repo = https://github.com/loucash/eqm -pkg_eqm_commit = master - -PACKAGES += eredis -pkg_eredis_name = eredis -pkg_eredis_description = Erlang Redis client -pkg_eredis_homepage = https://github.com/wooga/eredis -pkg_eredis_fetch = git -pkg_eredis_repo = https://github.com/wooga/eredis -pkg_eredis_commit = master - -PACKAGES += eredis_pool -pkg_eredis_pool_name = eredis_pool -pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. -pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool -pkg_eredis_pool_fetch = git -pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool -pkg_eredis_pool_commit = master - -PACKAGES += erl_streams -pkg_erl_streams_name = erl_streams -pkg_erl_streams_description = Streams in Erlang -pkg_erl_streams_homepage = https://github.com/epappas/erl_streams -pkg_erl_streams_fetch = git -pkg_erl_streams_repo = https://github.com/epappas/erl_streams -pkg_erl_streams_commit = master - -PACKAGES += erlang_cep -pkg_erlang_cep_name = erlang_cep -pkg_erlang_cep_description = A basic CEP package written in erlang -pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep -pkg_erlang_cep_fetch = git -pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep -pkg_erlang_cep_commit = master - -PACKAGES += erlang_js -pkg_erlang_js_name = erlang_js -pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. -pkg_erlang_js_homepage = https://github.com/basho/erlang_js -pkg_erlang_js_fetch = git -pkg_erlang_js_repo = https://github.com/basho/erlang_js -pkg_erlang_js_commit = master - -PACKAGES += erlang_localtime -pkg_erlang_localtime_name = erlang_localtime -pkg_erlang_localtime_description = Erlang library for conversion from one local time to another -pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime -pkg_erlang_localtime_fetch = git -pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime -pkg_erlang_localtime_commit = master - -PACKAGES += erlang_smtp -pkg_erlang_smtp_name = erlang_smtp -pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp -pkg_erlang_smtp_fetch = git -pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp -pkg_erlang_smtp_commit = master - -PACKAGES += erlang_term -pkg_erlang_term_name = erlang_term -pkg_erlang_term_description = Erlang Term Info -pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term -pkg_erlang_term_fetch = git -pkg_erlang_term_repo = https://github.com/okeuday/erlang_term -pkg_erlang_term_commit = master - -PACKAGES += erlastic_search -pkg_erlastic_search_name = erlastic_search -pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. -pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search -pkg_erlastic_search_fetch = git -pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search -pkg_erlastic_search_commit = master - -PACKAGES += erlasticsearch -pkg_erlasticsearch_name = erlasticsearch -pkg_erlasticsearch_description = Erlang thrift interface to elastic_search -pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch -pkg_erlasticsearch_fetch = git -pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch -pkg_erlasticsearch_commit = master - -PACKAGES += erlbrake -pkg_erlbrake_name = erlbrake -pkg_erlbrake_description = Erlang Airbrake notification client -pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake -pkg_erlbrake_fetch = git -pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake -pkg_erlbrake_commit = master - -PACKAGES += erlcloud -pkg_erlcloud_name = erlcloud -pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) -pkg_erlcloud_homepage = https://github.com/gleber/erlcloud -pkg_erlcloud_fetch = git -pkg_erlcloud_repo = https://github.com/gleber/erlcloud -pkg_erlcloud_commit = master - -PACKAGES += erlcron -pkg_erlcron_name = erlcron -pkg_erlcron_description = Erlang cronish system -pkg_erlcron_homepage = https://github.com/erlware/erlcron -pkg_erlcron_fetch = git -pkg_erlcron_repo = https://github.com/erlware/erlcron -pkg_erlcron_commit = master - -PACKAGES += erldb -pkg_erldb_name = erldb -pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang -pkg_erldb_homepage = http://erldb.org -pkg_erldb_fetch = git -pkg_erldb_repo = https://github.com/erldb/erldb -pkg_erldb_commit = master - -PACKAGES += erldis -pkg_erldis_name = erldis -pkg_erldis_description = redis erlang client library -pkg_erldis_homepage = https://github.com/cstar/erldis -pkg_erldis_fetch = git -pkg_erldis_repo = https://github.com/cstar/erldis -pkg_erldis_commit = master - -PACKAGES += erldns -pkg_erldns_name = erldns -pkg_erldns_description = DNS server, in erlang. 
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns -pkg_erldns_fetch = git -pkg_erldns_repo = https://github.com/aetrion/erl-dns -pkg_erldns_commit = master - -PACKAGES += erldocker -pkg_erldocker_name = erldocker -pkg_erldocker_description = Docker Remote API client for Erlang -pkg_erldocker_homepage = https://github.com/proger/erldocker -pkg_erldocker_fetch = git -pkg_erldocker_repo = https://github.com/proger/erldocker -pkg_erldocker_commit = master - -PACKAGES += erlfsmon -pkg_erlfsmon_name = erlfsmon -pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX -pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon -pkg_erlfsmon_fetch = git -pkg_erlfsmon_repo = https://github.com/proger/erlfsmon -pkg_erlfsmon_commit = master - -PACKAGES += erlgit -pkg_erlgit_name = erlgit -pkg_erlgit_description = Erlang convenience wrapper around git executable -pkg_erlgit_homepage = https://github.com/gleber/erlgit -pkg_erlgit_fetch = git -pkg_erlgit_repo = https://github.com/gleber/erlgit -pkg_erlgit_commit = master - -PACKAGES += erlguten -pkg_erlguten_name = erlguten -pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang. -pkg_erlguten_homepage = https://github.com/richcarl/erlguten -pkg_erlguten_fetch = git -pkg_erlguten_repo = https://github.com/richcarl/erlguten -pkg_erlguten_commit = master - -PACKAGES += erlmc -pkg_erlmc_name = erlmc -pkg_erlmc_description = Erlang memcached binary protocol client -pkg_erlmc_homepage = https://github.com/jkvor/erlmc -pkg_erlmc_fetch = git -pkg_erlmc_repo = https://github.com/jkvor/erlmc -pkg_erlmc_commit = master - -PACKAGES += erlmongo -pkg_erlmongo_name = erlmongo -pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support -pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo -pkg_erlmongo_fetch = git -pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo -pkg_erlmongo_commit = master - -PACKAGES += erlog -pkg_erlog_name = erlog -pkg_erlog_description = Prolog interpreter in and for Erlang -pkg_erlog_homepage = https://github.com/rvirding/erlog -pkg_erlog_fetch = git -pkg_erlog_repo = https://github.com/rvirding/erlog -pkg_erlog_commit = master - -PACKAGES += erlpass -pkg_erlpass_name = erlpass -pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. -pkg_erlpass_homepage = https://github.com/ferd/erlpass -pkg_erlpass_fetch = git -pkg_erlpass_repo = https://github.com/ferd/erlpass -pkg_erlpass_commit = master - -PACKAGES += erlport -pkg_erlport_name = erlport -pkg_erlport_description = ErlPort - connect Erlang to other languages -pkg_erlport_homepage = https://github.com/hdima/erlport -pkg_erlport_fetch = git -pkg_erlport_repo = https://github.com/hdima/erlport -pkg_erlport_commit = master - -PACKAGES += erlsh -pkg_erlsh_name = erlsh -pkg_erlsh_description = Erlang shell tools -pkg_erlsh_homepage = https://github.com/proger/erlsh -pkg_erlsh_fetch = git -pkg_erlsh_repo = https://github.com/proger/erlsh -pkg_erlsh_commit = master - -PACKAGES += erlsha2 -pkg_erlsha2_name = erlsha2 -pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. 
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 -pkg_erlsha2_fetch = git -pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 -pkg_erlsha2_commit = master - -PACKAGES += erlsom -pkg_erlsom_name = erlsom -pkg_erlsom_description = XML parser for Erlang -pkg_erlsom_homepage = https://github.com/willemdj/erlsom -pkg_erlsom_fetch = git -pkg_erlsom_repo = https://github.com/willemdj/erlsom -pkg_erlsom_commit = master - -PACKAGES += erlubi -pkg_erlubi_name = erlubi -pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) -pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi -pkg_erlubi_fetch = git -pkg_erlubi_repo = https://github.com/krestenkrab/erlubi -pkg_erlubi_commit = master - -PACKAGES += erlvolt -pkg_erlvolt_name = erlvolt -pkg_erlvolt_description = VoltDB Erlang Client Driver -pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang -pkg_erlvolt_fetch = git -pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang -pkg_erlvolt_commit = master - -PACKAGES += erlware_commons -pkg_erlware_commons_name = erlware_commons -pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components. -pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons -pkg_erlware_commons_fetch = git -pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons -pkg_erlware_commons_commit = master +pkg_cowlib_commit = master PACKAGES += erlydtl pkg_erlydtl_name = erlydtl @@ -1601,454 +323,6 @@ pkg_erlydtl_fetch = git pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl pkg_erlydtl_commit = master -PACKAGES += errd -pkg_errd_name = errd -pkg_errd_description = Erlang RRDTool library -pkg_errd_homepage = https://github.com/archaelus/errd -pkg_errd_fetch = git -pkg_errd_repo = https://github.com/archaelus/errd -pkg_errd_commit = master - -PACKAGES += erserve -pkg_erserve_name = erserve -pkg_erserve_description = Erlang/Rserve communication interface -pkg_erserve_homepage = https://github.com/del/erserve -pkg_erserve_fetch = git -pkg_erserve_repo = https://github.com/del/erserve -pkg_erserve_commit = master - -PACKAGES += erwa -pkg_erwa_name = erwa -pkg_erwa_description = A WAMP router and client written in Erlang. 
-pkg_erwa_homepage = https://github.com/bwegh/erwa -pkg_erwa_fetch = git -pkg_erwa_repo = https://github.com/bwegh/erwa -pkg_erwa_commit = master - -PACKAGES += escalus -pkg_escalus_name = escalus -pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers -pkg_escalus_homepage = https://github.com/esl/escalus -pkg_escalus_fetch = git -pkg_escalus_repo = https://github.com/esl/escalus -pkg_escalus_commit = master - -PACKAGES += esh_mk -pkg_esh_mk_name = esh_mk -pkg_esh_mk_description = esh template engine plugin for erlang.mk -pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk -pkg_esh_mk_fetch = git -pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git -pkg_esh_mk_commit = master - -PACKAGES += espec -pkg_espec_name = espec -pkg_espec_description = ESpec: Behaviour driven development framework for Erlang -pkg_espec_homepage = https://github.com/lucaspiller/espec -pkg_espec_fetch = git -pkg_espec_repo = https://github.com/lucaspiller/espec -pkg_espec_commit = master - -PACKAGES += estatsd -pkg_estatsd_name = estatsd -pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite -pkg_estatsd_homepage = https://github.com/RJ/estatsd -pkg_estatsd_fetch = git -pkg_estatsd_repo = https://github.com/RJ/estatsd -pkg_estatsd_commit = master - -PACKAGES += etap -pkg_etap_name = etap -pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. -pkg_etap_homepage = https://github.com/ngerakines/etap -pkg_etap_fetch = git -pkg_etap_repo = https://github.com/ngerakines/etap -pkg_etap_commit = master - -PACKAGES += etest -pkg_etest_name = etest -pkg_etest_description = A lightweight, convention over configuration test framework for Erlang -pkg_etest_homepage = https://github.com/wooga/etest -pkg_etest_fetch = git -pkg_etest_repo = https://github.com/wooga/etest -pkg_etest_commit = master - -PACKAGES += etest_http -pkg_etest_http_name = etest_http -pkg_etest_http_description = etest Assertions around HTTP (client-side) -pkg_etest_http_homepage = https://github.com/wooga/etest_http -pkg_etest_http_fetch = git -pkg_etest_http_repo = https://github.com/wooga/etest_http -pkg_etest_http_commit = master - -PACKAGES += etoml -pkg_etoml_name = etoml -pkg_etoml_description = TOML language erlang parser -pkg_etoml_homepage = https://github.com/kalta/etoml -pkg_etoml_fetch = git -pkg_etoml_repo = https://github.com/kalta/etoml -pkg_etoml_commit = master - -PACKAGES += eunit -pkg_eunit_name = eunit -pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository. -pkg_eunit_homepage = https://github.com/richcarl/eunit -pkg_eunit_fetch = git -pkg_eunit_repo = https://github.com/richcarl/eunit -pkg_eunit_commit = master - -PACKAGES += eunit_formatters -pkg_eunit_formatters_name = eunit_formatters -pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. 
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters -pkg_eunit_formatters_fetch = git -pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters -pkg_eunit_formatters_commit = master - -PACKAGES += euthanasia -pkg_euthanasia_name = euthanasia -pkg_euthanasia_description = Merciful killer for your Erlang processes -pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia -pkg_euthanasia_fetch = git -pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia -pkg_euthanasia_commit = master - -PACKAGES += evum -pkg_evum_name = evum -pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM -pkg_evum_homepage = https://github.com/msantos/evum -pkg_evum_fetch = git -pkg_evum_repo = https://github.com/msantos/evum -pkg_evum_commit = master - -PACKAGES += exec -pkg_exec_name = erlexec -pkg_exec_description = Execute and control OS processes from Erlang/OTP. -pkg_exec_homepage = http://saleyn.github.com/erlexec -pkg_exec_fetch = git -pkg_exec_repo = https://github.com/saleyn/erlexec -pkg_exec_commit = master - -PACKAGES += exml -pkg_exml_name = exml -pkg_exml_description = XML parsing library in Erlang -pkg_exml_homepage = https://github.com/paulgray/exml -pkg_exml_fetch = git -pkg_exml_repo = https://github.com/paulgray/exml -pkg_exml_commit = master - -PACKAGES += exometer -pkg_exometer_name = exometer -pkg_exometer_description = Basic measurement objects and probe behavior -pkg_exometer_homepage = https://github.com/Feuerlabs/exometer -pkg_exometer_fetch = git -pkg_exometer_repo = https://github.com/Feuerlabs/exometer -pkg_exometer_commit = master - -PACKAGES += exs1024 -pkg_exs1024_name = exs1024 -pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. -pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 -pkg_exs1024_fetch = git -pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 -pkg_exs1024_commit = master - -PACKAGES += exs64 -pkg_exs64_name = exs64 -pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. -pkg_exs64_homepage = https://github.com/jj1bdx/exs64 -pkg_exs64_fetch = git -pkg_exs64_repo = https://github.com/jj1bdx/exs64 -pkg_exs64_commit = master - -PACKAGES += exsplus116 -pkg_exsplus116_name = exsplus116 -pkg_exsplus116_description = Xorshift116plus for Erlang -pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 -pkg_exsplus116_fetch = git -pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 -pkg_exsplus116_commit = master - -PACKAGES += exsplus128 -pkg_exsplus128_name = exsplus128 -pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. -pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 -pkg_exsplus128_fetch = git -pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 -pkg_exsplus128_commit = master - -PACKAGES += ezmq -pkg_ezmq_name = ezmq -pkg_ezmq_description = zMQ implemented in Erlang -pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq -pkg_ezmq_fetch = git -pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq -pkg_ezmq_commit = master - -PACKAGES += ezmtp -pkg_ezmtp_name = ezmtp -pkg_ezmtp_description = ZMTP protocol in pure Erlang. 
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp -pkg_ezmtp_fetch = git -pkg_ezmtp_repo = https://github.com/a13x/ezmtp -pkg_ezmtp_commit = master - -PACKAGES += fast_disk_log -pkg_fast_disk_log_name = fast_disk_log -pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger -pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log -pkg_fast_disk_log_fetch = git -pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log -pkg_fast_disk_log_commit = master - -PACKAGES += feeder -pkg_feeder_name = feeder -pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. -pkg_feeder_homepage = https://github.com/michaelnisi/feeder -pkg_feeder_fetch = git -pkg_feeder_repo = https://github.com/michaelnisi/feeder -pkg_feeder_commit = master - -PACKAGES += find_crate -pkg_find_crate_name = find_crate -pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory -pkg_find_crate_homepage = https://github.com/goertzenator/find_crate -pkg_find_crate_fetch = git -pkg_find_crate_repo = https://github.com/goertzenator/find_crate -pkg_find_crate_commit = master - -PACKAGES += fix -pkg_fix_name = fix -pkg_fix_description = http://fixprotocol.org/ implementation. -pkg_fix_homepage = https://github.com/maxlapshin/fix -pkg_fix_fetch = git -pkg_fix_repo = https://github.com/maxlapshin/fix -pkg_fix_commit = master - -PACKAGES += flower -pkg_flower_name = flower -pkg_flower_description = FlowER - a Erlang OpenFlow development platform -pkg_flower_homepage = https://github.com/travelping/flower -pkg_flower_fetch = git -pkg_flower_repo = https://github.com/travelping/flower -pkg_flower_commit = master - -PACKAGES += fn -pkg_fn_name = fn -pkg_fn_description = Function utilities for Erlang -pkg_fn_homepage = https://github.com/reiddraper/fn -pkg_fn_fetch = git -pkg_fn_repo = https://github.com/reiddraper/fn -pkg_fn_commit = master - -PACKAGES += folsom -pkg_folsom_name = folsom -pkg_folsom_description = Expose Erlang Events and Metrics -pkg_folsom_homepage = https://github.com/boundary/folsom -pkg_folsom_fetch = git -pkg_folsom_repo = https://github.com/boundary/folsom -pkg_folsom_commit = master - -PACKAGES += folsom_cowboy -pkg_folsom_cowboy_name = folsom_cowboy -pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy -pkg_folsom_cowboy_fetch = git -pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy -pkg_folsom_cowboy_commit = master - -PACKAGES += folsomite -pkg_folsomite_name = folsomite -pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics -pkg_folsomite_homepage = https://github.com/campanja/folsomite -pkg_folsomite_fetch = git -pkg_folsomite_repo = https://github.com/campanja/folsomite -pkg_folsomite_commit = master - -PACKAGES += fs -pkg_fs_name = fs -pkg_fs_description = Erlang FileSystem Listener -pkg_fs_homepage = https://github.com/synrc/fs -pkg_fs_fetch = git -pkg_fs_repo = https://github.com/synrc/fs -pkg_fs_commit = master - -PACKAGES += fuse -pkg_fuse_name = fuse -pkg_fuse_description = A Circuit Breaker for Erlang -pkg_fuse_homepage = https://github.com/jlouis/fuse -pkg_fuse_fetch = git -pkg_fuse_repo = https://github.com/jlouis/fuse -pkg_fuse_commit = master - -PACKAGES += gcm -pkg_gcm_name = gcm -pkg_gcm_description = An Erlang application for Google Cloud Messaging -pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang -pkg_gcm_fetch = git -pkg_gcm_repo = https://github.com/pdincau/gcm-erlang -pkg_gcm_commit = master - -PACKAGES += gcprof -pkg_gcprof_name = gcprof -pkg_gcprof_description = Garbage Collection profiler for Erlang -pkg_gcprof_homepage = https://github.com/knutin/gcprof -pkg_gcprof_fetch = git -pkg_gcprof_repo = https://github.com/knutin/gcprof -pkg_gcprof_commit = master - -PACKAGES += geas -pkg_geas_name = geas -pkg_geas_description = Guess Erlang Application Scattering -pkg_geas_homepage = https://github.com/crownedgrouse/geas -pkg_geas_fetch = git -pkg_geas_repo = https://github.com/crownedgrouse/geas -pkg_geas_commit = master - -PACKAGES += geef -pkg_geef_name = geef -pkg_geef_description = Git NEEEEF (Erlang NIF) -pkg_geef_homepage = https://github.com/carlosmn/geef -pkg_geef_fetch = git -pkg_geef_repo = https://github.com/carlosmn/geef -pkg_geef_commit = master - -PACKAGES += gen_coap -pkg_gen_coap_name = gen_coap -pkg_gen_coap_description = Generic Erlang CoAP Client/Server -pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap -pkg_gen_coap_fetch = git -pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap -pkg_gen_coap_commit = master - -PACKAGES += gen_cycle -pkg_gen_cycle_name = gen_cycle -pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks -pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle -pkg_gen_cycle_fetch = git -pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle -pkg_gen_cycle_commit = develop - -PACKAGES += gen_icmp -pkg_gen_icmp_name = gen_icmp -pkg_gen_icmp_description = Erlang interface to ICMP sockets -pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp -pkg_gen_icmp_fetch = git -pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp -pkg_gen_icmp_commit = master - -PACKAGES += gen_leader -pkg_gen_leader_name = gen_leader -pkg_gen_leader_description = leader election behavior -pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival -pkg_gen_leader_fetch = git -pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival -pkg_gen_leader_commit = master - -PACKAGES += gen_nb_server -pkg_gen_nb_server_name = gen_nb_server -pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers -pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server -pkg_gen_nb_server_fetch = git -pkg_gen_nb_server_repo = 
https://github.com/kevsmith/gen_nb_server -pkg_gen_nb_server_commit = master - -PACKAGES += gen_paxos -pkg_gen_paxos_name = gen_paxos -pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol -pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos -pkg_gen_paxos_fetch = git -pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos -pkg_gen_paxos_commit = master - -PACKAGES += gen_rpc -pkg_gen_rpc_name = gen_rpc -pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages -pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git -pkg_gen_rpc_fetch = git -pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git -pkg_gen_rpc_commit = master - -PACKAGES += gen_smtp -pkg_gen_smtp_name = gen_smtp -pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules -pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp -pkg_gen_smtp_fetch = git -pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp -pkg_gen_smtp_commit = master - -PACKAGES += gen_tracker -pkg_gen_tracker_name = gen_tracker -pkg_gen_tracker_description = supervisor with ets handling of children and their metadata -pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker -pkg_gen_tracker_fetch = git -pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker -pkg_gen_tracker_commit = master - -PACKAGES += gen_unix -pkg_gen_unix_name = gen_unix -pkg_gen_unix_description = Erlang Unix socket interface -pkg_gen_unix_homepage = https://github.com/msantos/gen_unix -pkg_gen_unix_fetch = git -pkg_gen_unix_repo = https://github.com/msantos/gen_unix -pkg_gen_unix_commit = master - -PACKAGES += geode -pkg_geode_name = geode -pkg_geode_description = geohash/proximity lookup in pure, uncut erlang. -pkg_geode_homepage = https://github.com/bradfordw/geode -pkg_geode_fetch = git -pkg_geode_repo = https://github.com/bradfordw/geode -pkg_geode_commit = master - -PACKAGES += getopt -pkg_getopt_name = getopt -pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax -pkg_getopt_homepage = https://github.com/jcomellas/getopt -pkg_getopt_fetch = git -pkg_getopt_repo = https://github.com/jcomellas/getopt -pkg_getopt_commit = master - -PACKAGES += gettext -pkg_gettext_name = gettext -pkg_gettext_description = Erlang internationalization library. 
-pkg_gettext_homepage = https://github.com/etnt/gettext -pkg_gettext_fetch = git -pkg_gettext_repo = https://github.com/etnt/gettext -pkg_gettext_commit = master - -PACKAGES += giallo -pkg_giallo_name = giallo -pkg_giallo_description = Small and flexible web framework on top of Cowboy -pkg_giallo_homepage = https://github.com/kivra/giallo -pkg_giallo_fetch = git -pkg_giallo_repo = https://github.com/kivra/giallo -pkg_giallo_commit = master - -PACKAGES += gin -pkg_gin_name = gin -pkg_gin_description = The guards and for Erlang parse_transform -pkg_gin_homepage = https://github.com/mad-cocktail/gin -pkg_gin_fetch = git -pkg_gin_repo = https://github.com/mad-cocktail/gin -pkg_gin_commit = master - -PACKAGES += gitty -pkg_gitty_name = gitty -pkg_gitty_description = Git access in erlang -pkg_gitty_homepage = https://github.com/maxlapshin/gitty -pkg_gitty_fetch = git -pkg_gitty_repo = https://github.com/maxlapshin/gitty -pkg_gitty_commit = master - -PACKAGES += gold_fever -pkg_gold_fever_name = gold_fever -pkg_gold_fever_description = A Treasure Hunt for Erlangers -pkg_gold_fever_homepage = https://github.com/inaka/gold_fever -pkg_gold_fever_fetch = git -pkg_gold_fever_repo = https://github.com/inaka/gold_fever -pkg_gold_fever_commit = master - PACKAGES += gpb pkg_gpb_name = gpb pkg_gpb_description = A Google Protobuf implementation for Erlang @@ -2057,38 +331,6 @@ pkg_gpb_fetch = git pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb pkg_gpb_commit = master -PACKAGES += gproc -pkg_gproc_name = gproc -pkg_gproc_description = Extended process registry for Erlang -pkg_gproc_homepage = https://github.com/uwiger/gproc -pkg_gproc_fetch = git -pkg_gproc_repo = https://github.com/uwiger/gproc -pkg_gproc_commit = master - -PACKAGES += grapherl -pkg_grapherl_name = grapherl -pkg_grapherl_description = Create graphs of Erlang systems and programs -pkg_grapherl_homepage = https://github.com/eproxus/grapherl -pkg_grapherl_fetch = git -pkg_grapherl_repo = https://github.com/eproxus/grapherl -pkg_grapherl_commit = master - -PACKAGES += grpc -pkg_grpc_name = grpc -pkg_grpc_description = gRPC server in Erlang -pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc -pkg_grpc_fetch = git -pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc -pkg_grpc_commit = master - -PACKAGES += grpc_client -pkg_grpc_client_name = grpc_client -pkg_grpc_client_description = gRPC client in Erlang -pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client -pkg_grpc_client_fetch = git -pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client -pkg_grpc_client_commit = master - PACKAGES += gun pkg_gun_name = gun pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. @@ -2097,1085 +339,14 @@ pkg_gun_fetch = git pkg_gun_repo = https://github.com/ninenines/gun pkg_gun_commit = master -PACKAGES += gut -pkg_gut_name = gut -pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman -pkg_gut_homepage = https://github.com/unbalancedparentheses/gut -pkg_gut_fetch = git -pkg_gut_repo = https://github.com/unbalancedparentheses/gut -pkg_gut_commit = master - -PACKAGES += hackney -pkg_hackney_name = hackney -pkg_hackney_description = simple HTTP client in Erlang -pkg_hackney_homepage = https://github.com/benoitc/hackney -pkg_hackney_fetch = git -pkg_hackney_repo = https://github.com/benoitc/hackney -pkg_hackney_commit = master - -PACKAGES += hamcrest -pkg_hamcrest_name = hamcrest -pkg_hamcrest_description = Erlang port of Hamcrest -pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang -pkg_hamcrest_fetch = git -pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang -pkg_hamcrest_commit = master - -PACKAGES += hanoidb -pkg_hanoidb_name = hanoidb -pkg_hanoidb_description = Erlang LSM BTree Storage -pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb -pkg_hanoidb_fetch = git -pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb -pkg_hanoidb_commit = master - -PACKAGES += hottub -pkg_hottub_name = hottub -pkg_hottub_description = Permanent Erlang Worker Pool -pkg_hottub_homepage = https://github.com/bfrog/hottub -pkg_hottub_fetch = git -pkg_hottub_repo = https://github.com/bfrog/hottub -pkg_hottub_commit = master - -PACKAGES += hpack -pkg_hpack_name = hpack -pkg_hpack_description = HPACK Implementation for Erlang -pkg_hpack_homepage = https://github.com/joedevivo/hpack -pkg_hpack_fetch = git -pkg_hpack_repo = https://github.com/joedevivo/hpack -pkg_hpack_commit = master - -PACKAGES += hyper -pkg_hyper_name = hyper -pkg_hyper_description = Erlang implementation of HyperLogLog -pkg_hyper_homepage = https://github.com/GameAnalytics/hyper -pkg_hyper_fetch = git -pkg_hyper_repo = https://github.com/GameAnalytics/hyper -pkg_hyper_commit = master - -PACKAGES += i18n -pkg_i18n_name = i18n -pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e) -pkg_i18n_homepage = https://github.com/erlang-unicode/i18n -pkg_i18n_fetch = git -pkg_i18n_repo = https://github.com/erlang-unicode/i18n -pkg_i18n_commit = master - -PACKAGES += ibrowse -pkg_ibrowse_name = ibrowse -pkg_ibrowse_description = Erlang HTTP client -pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse -pkg_ibrowse_fetch = git -pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse -pkg_ibrowse_commit = master - -PACKAGES += idna -pkg_idna_name = idna -pkg_idna_description = Erlang IDNA lib -pkg_idna_homepage = https://github.com/benoitc/erlang-idna -pkg_idna_fetch = git -pkg_idna_repo = https://github.com/benoitc/erlang-idna -pkg_idna_commit = master - -PACKAGES += ierlang -pkg_ierlang_name = ierlang -pkg_ierlang_description = An Erlang language kernel for IPython. 
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang -pkg_ierlang_fetch = git -pkg_ierlang_repo = https://github.com/robbielynch/ierlang -pkg_ierlang_commit = master - -PACKAGES += iota -pkg_iota_name = iota -pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code -pkg_iota_homepage = https://github.com/jpgneves/iota -pkg_iota_fetch = git -pkg_iota_repo = https://github.com/jpgneves/iota -pkg_iota_commit = master - -PACKAGES += irc_lib -pkg_irc_lib_name = irc_lib -pkg_irc_lib_description = Erlang irc client library -pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib -pkg_irc_lib_fetch = git -pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib -pkg_irc_lib_commit = master - -PACKAGES += ircd -pkg_ircd_name = ircd -pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. -pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd -pkg_ircd_fetch = git -pkg_ircd_repo = https://github.com/tonyg/erlang-ircd -pkg_ircd_commit = master - -PACKAGES += iris -pkg_iris_name = iris -pkg_iris_description = Iris Erlang binding -pkg_iris_homepage = https://github.com/project-iris/iris-erl -pkg_iris_fetch = git -pkg_iris_repo = https://github.com/project-iris/iris-erl -pkg_iris_commit = master - -PACKAGES += iso8601 -pkg_iso8601_name = iso8601 -pkg_iso8601_description = Erlang ISO 8601 date formatter/parser -pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 -pkg_iso8601_fetch = git -pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 -pkg_iso8601_commit = master - -PACKAGES += jamdb_sybase -pkg_jamdb_sybase_name = jamdb_sybase -pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE -pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase -pkg_jamdb_sybase_fetch = git -pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase -pkg_jamdb_sybase_commit = master - -PACKAGES += jerg -pkg_jerg_name = jerg -pkg_jerg_description = JSON Schema to Erlang Records Generator -pkg_jerg_homepage = https://github.com/ddossot/jerg -pkg_jerg_fetch = git -pkg_jerg_repo = https://github.com/ddossot/jerg -pkg_jerg_commit = master - -PACKAGES += jesse -pkg_jesse_name = jesse -pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. -pkg_jesse_homepage = https://github.com/for-GET/jesse -pkg_jesse_fetch = git -pkg_jesse_repo = https://github.com/for-GET/jesse -pkg_jesse_commit = master - -PACKAGES += jiffy -pkg_jiffy_name = jiffy -pkg_jiffy_description = JSON NIFs for Erlang. 
-pkg_jiffy_homepage = https://github.com/davisp/jiffy -pkg_jiffy_fetch = git -pkg_jiffy_repo = https://github.com/davisp/jiffy -pkg_jiffy_commit = master - -PACKAGES += jiffy_v -pkg_jiffy_v_name = jiffy_v -pkg_jiffy_v_description = JSON validation utility -pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v -pkg_jiffy_v_fetch = git -pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v -pkg_jiffy_v_commit = master - -PACKAGES += jobs -pkg_jobs_name = jobs -pkg_jobs_description = a Job scheduler for load regulation -pkg_jobs_homepage = https://github.com/esl/jobs -pkg_jobs_fetch = git -pkg_jobs_repo = https://github.com/esl/jobs -pkg_jobs_commit = master - -PACKAGES += joxa -pkg_joxa_name = joxa -pkg_joxa_description = A Modern Lisp for the Erlang VM -pkg_joxa_homepage = https://github.com/joxa/joxa -pkg_joxa_fetch = git -pkg_joxa_repo = https://github.com/joxa/joxa -pkg_joxa_commit = master - -PACKAGES += json -pkg_json_name = json -pkg_json_description = a high level json library for erlang (17.0+) -pkg_json_homepage = https://github.com/talentdeficit/json -pkg_json_fetch = git -pkg_json_repo = https://github.com/talentdeficit/json -pkg_json_commit = master - -PACKAGES += json_rec -pkg_json_rec_name = json_rec -pkg_json_rec_description = JSON to erlang record -pkg_json_rec_homepage = https://github.com/justinkirby/json_rec -pkg_json_rec_fetch = git -pkg_json_rec_repo = https://github.com/justinkirby/json_rec -pkg_json_rec_commit = master - -PACKAGES += jsone -pkg_jsone_name = jsone -pkg_jsone_description = An Erlang library for encoding, decoding JSON data. -pkg_jsone_homepage = https://github.com/sile/jsone.git -pkg_jsone_fetch = git -pkg_jsone_repo = https://github.com/sile/jsone.git -pkg_jsone_commit = master - -PACKAGES += jsonerl -pkg_jsonerl_name = jsonerl -pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder -pkg_jsonerl_homepage = https://github.com/lambder/jsonerl -pkg_jsonerl_fetch = git -pkg_jsonerl_repo = https://github.com/lambder/jsonerl -pkg_jsonerl_commit = master - -PACKAGES += jsonpath -pkg_jsonpath_name = jsonpath -pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation -pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath -pkg_jsonpath_fetch = git -pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath -pkg_jsonpath_commit = master - -PACKAGES += jsonx -pkg_jsonx_name = jsonx -pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. -pkg_jsonx_homepage = https://github.com/iskra/jsonx -pkg_jsonx_fetch = git -pkg_jsonx_repo = https://github.com/iskra/jsonx -pkg_jsonx_commit = master - -PACKAGES += jsx -pkg_jsx_name = jsx -pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. 
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx -pkg_jsx_fetch = git -pkg_jsx_repo = https://github.com/talentdeficit/jsx -pkg_jsx_commit = main - -PACKAGES += kafka -pkg_kafka_name = kafka -pkg_kafka_description = Kafka consumer and producer in Erlang -pkg_kafka_homepage = https://github.com/wooga/kafka-erlang -pkg_kafka_fetch = git -pkg_kafka_repo = https://github.com/wooga/kafka-erlang -pkg_kafka_commit = master - -PACKAGES += kafka_protocol -pkg_kafka_protocol_name = kafka_protocol -pkg_kafka_protocol_description = Kafka protocol Erlang library -pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol -pkg_kafka_protocol_fetch = git -pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git -pkg_kafka_protocol_commit = master - -PACKAGES += kai -pkg_kai_name = kai -pkg_kai_description = DHT storage by Takeshi Inoue -pkg_kai_homepage = https://github.com/synrc/kai -pkg_kai_fetch = git -pkg_kai_repo = https://github.com/synrc/kai -pkg_kai_commit = master - -PACKAGES += katja -pkg_katja_name = katja -pkg_katja_description = A simple Riemann client written in Erlang. -pkg_katja_homepage = https://github.com/nifoc/katja -pkg_katja_fetch = git -pkg_katja_repo = https://github.com/nifoc/katja -pkg_katja_commit = master - -PACKAGES += kdht -pkg_kdht_name = kdht -pkg_kdht_description = kdht is an erlang DHT implementation -pkg_kdht_homepage = https://github.com/kevinlynx/kdht -pkg_kdht_fetch = git -pkg_kdht_repo = https://github.com/kevinlynx/kdht -pkg_kdht_commit = master - -PACKAGES += key2value -pkg_key2value_name = key2value -pkg_key2value_description = Erlang 2-way map -pkg_key2value_homepage = https://github.com/okeuday/key2value -pkg_key2value_fetch = git -pkg_key2value_repo = https://github.com/okeuday/key2value -pkg_key2value_commit = master - -PACKAGES += keys1value -pkg_keys1value_name = keys1value -pkg_keys1value_description = Erlang set associative map for key lists -pkg_keys1value_homepage = https://github.com/okeuday/keys1value -pkg_keys1value_fetch = git -pkg_keys1value_repo = https://github.com/okeuday/keys1value -pkg_keys1value_commit = master - -PACKAGES += kinetic -pkg_kinetic_name = kinetic -pkg_kinetic_description = Erlang Kinesis Client -pkg_kinetic_homepage = https://github.com/AdRoll/kinetic -pkg_kinetic_fetch = git -pkg_kinetic_repo = https://github.com/AdRoll/kinetic -pkg_kinetic_commit = master - -PACKAGES += kjell -pkg_kjell_name = kjell -pkg_kjell_description = Erlang Shell -pkg_kjell_homepage = https://github.com/karlll/kjell -pkg_kjell_fetch = git -pkg_kjell_repo = https://github.com/karlll/kjell -pkg_kjell_commit = master - -PACKAGES += kraken -pkg_kraken_name = kraken -pkg_kraken_description = Distributed Pubsub Server for Realtime Apps -pkg_kraken_homepage = https://github.com/Asana/kraken -pkg_kraken_fetch = git -pkg_kraken_repo = https://github.com/Asana/kraken -pkg_kraken_commit = master - -PACKAGES += kucumberl -pkg_kucumberl_name = kucumberl -pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber -pkg_kucumberl_homepage = https://github.com/openshine/kucumberl -pkg_kucumberl_fetch = git -pkg_kucumberl_repo = https://github.com/openshine/kucumberl -pkg_kucumberl_commit = master - -PACKAGES += kvc -pkg_kvc_name = kvc -pkg_kvc_description = KVC - Key Value Coding for Erlang data structures -pkg_kvc_homepage = https://github.com/etrepum/kvc -pkg_kvc_fetch = git -pkg_kvc_repo = https://github.com/etrepum/kvc -pkg_kvc_commit = master - -PACKAGES += kvlists -pkg_kvlists_name = kvlists 
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang -pkg_kvlists_homepage = https://github.com/jcomellas/kvlists -pkg_kvlists_fetch = git -pkg_kvlists_repo = https://github.com/jcomellas/kvlists -pkg_kvlists_commit = master - -PACKAGES += kvs -pkg_kvs_name = kvs -pkg_kvs_description = Container and Iterator -pkg_kvs_homepage = https://github.com/synrc/kvs -pkg_kvs_fetch = git -pkg_kvs_repo = https://github.com/synrc/kvs -pkg_kvs_commit = master - -PACKAGES += lager -pkg_lager_name = lager -pkg_lager_description = A logging framework for Erlang/OTP. -pkg_lager_homepage = https://github.com/erlang-lager/lager -pkg_lager_fetch = git -pkg_lager_repo = https://github.com/erlang-lager/lager -pkg_lager_commit = master - -PACKAGES += lager_amqp_backend -pkg_lager_amqp_backend_name = lager_amqp_backend -pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend -pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend -pkg_lager_amqp_backend_fetch = git -pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend -pkg_lager_amqp_backend_commit = master - -PACKAGES += lager_syslog -pkg_lager_syslog_name = lager_syslog -pkg_lager_syslog_description = Syslog backend for lager -pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog -pkg_lager_syslog_fetch = git -pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog -pkg_lager_syslog_commit = master - -PACKAGES += lambdapad -pkg_lambdapad_name = lambdapad -pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. -pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad -pkg_lambdapad_fetch = git -pkg_lambdapad_repo = https://github.com/gar1t/lambdapad -pkg_lambdapad_commit = master - -PACKAGES += lasp -pkg_lasp_name = lasp -pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations -pkg_lasp_homepage = http://lasp-lang.org/ -pkg_lasp_fetch = git -pkg_lasp_repo = https://github.com/lasp-lang/lasp -pkg_lasp_commit = master - -PACKAGES += lasse -pkg_lasse_name = lasse -pkg_lasse_description = SSE handler for Cowboy -pkg_lasse_homepage = https://github.com/inaka/lasse -pkg_lasse_fetch = git -pkg_lasse_repo = https://github.com/inaka/lasse -pkg_lasse_commit = master - -PACKAGES += ldap -pkg_ldap_name = ldap -pkg_ldap_description = LDAP server written in Erlang -pkg_ldap_homepage = https://github.com/spawnproc/ldap -pkg_ldap_fetch = git -pkg_ldap_repo = https://github.com/spawnproc/ldap -pkg_ldap_commit = master - -PACKAGES += lethink -pkg_lethink_name = lethink -pkg_lethink_description = erlang driver for rethinkdb -pkg_lethink_homepage = https://github.com/taybin/lethink -pkg_lethink_fetch = git -pkg_lethink_repo = https://github.com/taybin/lethink -pkg_lethink_commit = master - -PACKAGES += lfe -pkg_lfe_name = lfe -pkg_lfe_description = Lisp Flavoured Erlang (LFE) -pkg_lfe_homepage = https://github.com/rvirding/lfe -pkg_lfe_fetch = git -pkg_lfe_repo = https://github.com/rvirding/lfe -pkg_lfe_commit = master - -PACKAGES += ling -pkg_ling_name = ling -pkg_ling_description = Erlang on Xen -pkg_ling_homepage = https://github.com/cloudozer/ling -pkg_ling_fetch = git -pkg_ling_repo = https://github.com/cloudozer/ling -pkg_ling_commit = master - -PACKAGES += live -pkg_live_name = live -pkg_live_description = Automated module and configuration reloader. 
-pkg_live_homepage = http://ninenines.eu -pkg_live_fetch = git -pkg_live_repo = https://github.com/ninenines/live -pkg_live_commit = master - -PACKAGES += lmq -pkg_lmq_name = lmq -pkg_lmq_description = Lightweight Message Queue -pkg_lmq_homepage = https://github.com/iij/lmq -pkg_lmq_fetch = git -pkg_lmq_repo = https://github.com/iij/lmq -pkg_lmq_commit = master - -PACKAGES += locker -pkg_locker_name = locker -pkg_locker_description = Atomic distributed 'check and set' for short-lived keys -pkg_locker_homepage = https://github.com/wooga/locker -pkg_locker_fetch = git -pkg_locker_repo = https://github.com/wooga/locker -pkg_locker_commit = master - -PACKAGES += locks -pkg_locks_name = locks -pkg_locks_description = A scalable, deadlock-resolving resource locker -pkg_locks_homepage = https://github.com/uwiger/locks -pkg_locks_fetch = git -pkg_locks_repo = https://github.com/uwiger/locks -pkg_locks_commit = master - -PACKAGES += log4erl -pkg_log4erl_name = log4erl -pkg_log4erl_description = A logger for erlang in the spirit of Log4J. -pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl -pkg_log4erl_fetch = git -pkg_log4erl_repo = https://github.com/ahmednawras/log4erl -pkg_log4erl_commit = master - -PACKAGES += lol -pkg_lol_name = lol -pkg_lol_description = Lisp on erLang, and programming is fun again -pkg_lol_homepage = https://github.com/b0oh/lol -pkg_lol_fetch = git -pkg_lol_repo = https://github.com/b0oh/lol -pkg_lol_commit = master - -PACKAGES += lucid -pkg_lucid_name = lucid -pkg_lucid_description = HTTP/2 server written in Erlang -pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid -pkg_lucid_fetch = git -pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid -pkg_lucid_commit = master - -PACKAGES += luerl -pkg_luerl_name = luerl -pkg_luerl_description = Lua in Erlang -pkg_luerl_homepage = https://github.com/rvirding/luerl -pkg_luerl_fetch = git -pkg_luerl_repo = https://github.com/rvirding/luerl -pkg_luerl_commit = develop - -PACKAGES += luwak -pkg_luwak_name = luwak -pkg_luwak_description = Large-object storage interface for Riak -pkg_luwak_homepage = https://github.com/basho/luwak -pkg_luwak_fetch = git -pkg_luwak_repo = https://github.com/basho/luwak -pkg_luwak_commit = master - -PACKAGES += lux -pkg_lux_name = lux -pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands -pkg_lux_homepage = https://github.com/hawk/lux -pkg_lux_fetch = git -pkg_lux_repo = https://github.com/hawk/lux -pkg_lux_commit = master - -PACKAGES += machi -pkg_machi_name = machi -pkg_machi_description = Machi file store -pkg_machi_homepage = https://github.com/basho/machi -pkg_machi_fetch = git -pkg_machi_repo = https://github.com/basho/machi -pkg_machi_commit = master - -PACKAGES += mad -pkg_mad_name = mad -pkg_mad_description = Small and Fast Rebar Replacement -pkg_mad_homepage = https://github.com/synrc/mad -pkg_mad_fetch = git -pkg_mad_repo = https://github.com/synrc/mad -pkg_mad_commit = master - -PACKAGES += marina -pkg_marina_name = marina -pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client -pkg_marina_homepage = https://github.com/lpgauth/marina -pkg_marina_fetch = git -pkg_marina_repo = https://github.com/lpgauth/marina -pkg_marina_commit = master - -PACKAGES += mavg -pkg_mavg_name = mavg -pkg_mavg_description = Erlang :: Exponential moving average library -pkg_mavg_homepage = https://github.com/EchoTeam/mavg -pkg_mavg_fetch = git -pkg_mavg_repo = https://github.com/EchoTeam/mavg -pkg_mavg_commit = 
master - -PACKAGES += mc_erl -pkg_mc_erl_name = mc_erl -pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. -pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl -pkg_mc_erl_fetch = git -pkg_mc_erl_repo = https://github.com/clonejo/mc-erl -pkg_mc_erl_commit = master - -PACKAGES += mcd -pkg_mcd_name = mcd -pkg_mcd_description = Fast memcached protocol client in pure Erlang -pkg_mcd_homepage = https://github.com/EchoTeam/mcd -pkg_mcd_fetch = git -pkg_mcd_repo = https://github.com/EchoTeam/mcd -pkg_mcd_commit = master - -PACKAGES += mcerlang -pkg_mcerlang_name = mcerlang -pkg_mcerlang_description = The McErlang model checker for Erlang -pkg_mcerlang_homepage = https://github.com/fredlund/McErlang -pkg_mcerlang_fetch = git -pkg_mcerlang_repo = https://github.com/fredlund/McErlang -pkg_mcerlang_commit = master - -PACKAGES += meck -pkg_meck_name = meck -pkg_meck_description = A mocking library for Erlang -pkg_meck_homepage = https://github.com/eproxus/meck -pkg_meck_fetch = git -pkg_meck_repo = https://github.com/eproxus/meck -pkg_meck_commit = master - -PACKAGES += mekao -pkg_mekao_name = mekao -pkg_mekao_description = SQL constructor -pkg_mekao_homepage = https://github.com/ddosia/mekao -pkg_mekao_fetch = git -pkg_mekao_repo = https://github.com/ddosia/mekao -pkg_mekao_commit = master - -PACKAGES += memo -pkg_memo_name = memo -pkg_memo_description = Erlang memoization server -pkg_memo_homepage = https://github.com/tuncer/memo -pkg_memo_fetch = git -pkg_memo_repo = https://github.com/tuncer/memo -pkg_memo_commit = master - -PACKAGES += merge_index -pkg_merge_index_name = merge_index -pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). -pkg_merge_index_homepage = https://github.com/basho/merge_index -pkg_merge_index_fetch = git -pkg_merge_index_repo = https://github.com/basho/merge_index -pkg_merge_index_commit = master - -PACKAGES += merl -pkg_merl_name = merl -pkg_merl_description = Metaprogramming in Erlang -pkg_merl_homepage = https://github.com/richcarl/merl -pkg_merl_fetch = git -pkg_merl_repo = https://github.com/richcarl/merl -pkg_merl_commit = master - -PACKAGES += mimerl -pkg_mimerl_name = mimerl -pkg_mimerl_description = library to handle mimetypes -pkg_mimerl_homepage = https://github.com/benoitc/mimerl -pkg_mimerl_fetch = git -pkg_mimerl_repo = https://github.com/benoitc/mimerl -pkg_mimerl_commit = master - -PACKAGES += mimetypes -pkg_mimetypes_name = mimetypes -pkg_mimetypes_description = Erlang MIME types library -pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes -pkg_mimetypes_fetch = git -pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes -pkg_mimetypes_commit = master - -PACKAGES += mixer -pkg_mixer_name = mixer -pkg_mixer_description = Mix in functions from other modules -pkg_mixer_homepage = https://github.com/chef/mixer -pkg_mixer_fetch = git -pkg_mixer_repo = https://github.com/chef/mixer -pkg_mixer_commit = master - -PACKAGES += mochiweb -pkg_mochiweb_name = mochiweb -pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb -pkg_mochiweb_fetch = git -pkg_mochiweb_repo = https://github.com/mochi/mochiweb -pkg_mochiweb_commit = master - -PACKAGES += mochiweb_xpath -pkg_mochiweb_xpath_name = mochiweb_xpath -pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser -pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath -pkg_mochiweb_xpath_fetch = git -pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath -pkg_mochiweb_xpath_commit = master - -PACKAGES += mockgyver -pkg_mockgyver_name = mockgyver -pkg_mockgyver_description = A mocking library for Erlang -pkg_mockgyver_homepage = https://github.com/klajo/mockgyver -pkg_mockgyver_fetch = git -pkg_mockgyver_repo = https://github.com/klajo/mockgyver -pkg_mockgyver_commit = master - -PACKAGES += modlib -pkg_modlib_name = modlib -pkg_modlib_description = Web framework based on Erlang's inets httpd -pkg_modlib_homepage = https://github.com/gar1t/modlib -pkg_modlib_fetch = git -pkg_modlib_repo = https://github.com/gar1t/modlib -pkg_modlib_commit = master - -PACKAGES += mongodb -pkg_mongodb_name = mongodb -pkg_mongodb_description = MongoDB driver for Erlang -pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang -pkg_mongodb_fetch = git -pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang -pkg_mongodb_commit = master - -PACKAGES += mongooseim -pkg_mongooseim_name = mongooseim -pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions -pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform -pkg_mongooseim_fetch = git -pkg_mongooseim_repo = https://github.com/esl/MongooseIM -pkg_mongooseim_commit = master - -PACKAGES += moyo -pkg_moyo_name = moyo -pkg_moyo_description = Erlang utility functions library -pkg_moyo_homepage = https://github.com/dwango/moyo -pkg_moyo_fetch = git -pkg_moyo_repo = https://github.com/dwango/moyo -pkg_moyo_commit = master - -PACKAGES += msgpack -pkg_msgpack_name = msgpack -pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang -pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang -pkg_msgpack_fetch = git -pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang -pkg_msgpack_commit = master - -PACKAGES += mu2 -pkg_mu2_name = mu2 -pkg_mu2_description = Erlang mutation testing tool -pkg_mu2_homepage = https://github.com/ramsay-t/mu2 -pkg_mu2_fetch = git -pkg_mu2_repo = https://github.com/ramsay-t/mu2 -pkg_mu2_commit = master - -PACKAGES += mustache -pkg_mustache_name = mustache -pkg_mustache_description = Mustache template engine for Erlang. 
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl -pkg_mustache_fetch = git -pkg_mustache_repo = https://github.com/mojombo/mustache.erl -pkg_mustache_commit = master - -PACKAGES += myproto -pkg_myproto_name = myproto -pkg_myproto_description = MySQL Server Protocol in Erlang -pkg_myproto_homepage = https://github.com/altenwald/myproto -pkg_myproto_fetch = git -pkg_myproto_repo = https://github.com/altenwald/myproto -pkg_myproto_commit = master - -PACKAGES += mysql -pkg_mysql_name = mysql -pkg_mysql_description = MySQL client library for Erlang/OTP -pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp -pkg_mysql_fetch = git -pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp -pkg_mysql_commit = 1.5.1 - -PACKAGES += n2o -pkg_n2o_name = n2o -pkg_n2o_description = WebSocket Application Server -pkg_n2o_homepage = https://github.com/5HT/n2o -pkg_n2o_fetch = git -pkg_n2o_repo = https://github.com/5HT/n2o -pkg_n2o_commit = master - -PACKAGES += nat_upnp -pkg_nat_upnp_name = nat_upnp -pkg_nat_upnp_description = Erlang library to map your internal port to an external using UNP IGD -pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp -pkg_nat_upnp_fetch = git -pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp -pkg_nat_upnp_commit = master - -PACKAGES += neo4j -pkg_neo4j_name = neo4j -pkg_neo4j_description = Erlang client library for Neo4J. -pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang -pkg_neo4j_fetch = git -pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang -pkg_neo4j_commit = master - -PACKAGES += neotoma -pkg_neotoma_name = neotoma -pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. -pkg_neotoma_homepage = https://github.com/seancribbs/neotoma -pkg_neotoma_fetch = git -pkg_neotoma_repo = https://github.com/seancribbs/neotoma -pkg_neotoma_commit = master - -PACKAGES += newrelic -pkg_newrelic_name = newrelic -pkg_newrelic_description = Erlang library for sending metrics to New Relic -pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang -pkg_newrelic_fetch = git -pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang -pkg_newrelic_commit = master - -PACKAGES += nifty -pkg_nifty_name = nifty -pkg_nifty_description = Erlang NIF wrapper generator -pkg_nifty_homepage = https://github.com/parapluu/nifty -pkg_nifty_fetch = git -pkg_nifty_repo = https://github.com/parapluu/nifty -pkg_nifty_commit = master - -PACKAGES += nitrogen_core -pkg_nitrogen_core_name = nitrogen_core -pkg_nitrogen_core_description = The core Nitrogen library. 
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/ -pkg_nitrogen_core_fetch = git -pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core -pkg_nitrogen_core_commit = master - -PACKAGES += nkbase -pkg_nkbase_name = nkbase -pkg_nkbase_description = NkBASE distributed database -pkg_nkbase_homepage = https://github.com/Nekso/nkbase -pkg_nkbase_fetch = git -pkg_nkbase_repo = https://github.com/Nekso/nkbase -pkg_nkbase_commit = develop - -PACKAGES += nkdocker -pkg_nkdocker_name = nkdocker -pkg_nkdocker_description = Erlang Docker client -pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker -pkg_nkdocker_fetch = git -pkg_nkdocker_repo = https://github.com/Nekso/nkdocker -pkg_nkdocker_commit = master - -PACKAGES += nkpacket -pkg_nkpacket_name = nkpacket -pkg_nkpacket_description = Generic Erlang transport layer -pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket -pkg_nkpacket_fetch = git -pkg_nkpacket_repo = https://github.com/Nekso/nkpacket -pkg_nkpacket_commit = master - -PACKAGES += nksip -pkg_nksip_name = nksip -pkg_nksip_description = Erlang SIP application server -pkg_nksip_homepage = https://github.com/kalta/nksip -pkg_nksip_fetch = git -pkg_nksip_repo = https://github.com/kalta/nksip -pkg_nksip_commit = master - -PACKAGES += nodefinder -pkg_nodefinder_name = nodefinder -pkg_nodefinder_description = automatic node discovery via UDP multicast -pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder -pkg_nodefinder_fetch = git -pkg_nodefinder_repo = https://github.com/okeuday/nodefinder -pkg_nodefinder_commit = master - -PACKAGES += nprocreg -pkg_nprocreg_name = nprocreg -pkg_nprocreg_description = Minimal Distributed Erlang Process Registry -pkg_nprocreg_homepage = http://nitrogenproject.com/ -pkg_nprocreg_fetch = git -pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg -pkg_nprocreg_commit = master - -PACKAGES += oauth -pkg_oauth_name = oauth -pkg_oauth_description = An Erlang OAuth 1.0 implementation -pkg_oauth_homepage = https://github.com/tim/erlang-oauth -pkg_oauth_fetch = git -pkg_oauth_repo = https://github.com/tim/erlang-oauth -pkg_oauth_commit = master - -PACKAGES += oauth2 -pkg_oauth2_name = oauth2 -pkg_oauth2_description = Erlang Oauth2 implementation -pkg_oauth2_homepage = https://github.com/kivra/oauth2 -pkg_oauth2_fetch = git -pkg_oauth2_repo = https://github.com/kivra/oauth2 -pkg_oauth2_commit = master - -PACKAGES += observer_cli -pkg_observer_cli_name = observer_cli -pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line -pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli -pkg_observer_cli_fetch = git -pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli -pkg_observer_cli_commit = master - -PACKAGES += octopus -pkg_octopus_name = octopus -pkg_octopus_description = Small and flexible pool manager written in Erlang -pkg_octopus_homepage = https://github.com/erlangbureau/octopus -pkg_octopus_fetch = git -pkg_octopus_repo = https://github.com/erlangbureau/octopus -pkg_octopus_commit = master - -PACKAGES += of_protocol -pkg_of_protocol_name = of_protocol -pkg_of_protocol_description = OpenFlow Protocol Library for Erlang -pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol -pkg_of_protocol_fetch = git -pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol -pkg_of_protocol_commit = master - -PACKAGES += opencouch -pkg_opencouch_name = couch -pkg_opencouch_description = A embeddable document oriented database compatible with Apache CouchDB 
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch -pkg_opencouch_fetch = git -pkg_opencouch_repo = https://github.com/benoitc/opencouch -pkg_opencouch_commit = master - -PACKAGES += openflow -pkg_openflow_name = openflow -pkg_openflow_description = An OpenFlow controller written in pure erlang -pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow -pkg_openflow_fetch = git -pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow -pkg_openflow_commit = master - -PACKAGES += openid -pkg_openid_name = openid -pkg_openid_description = Erlang OpenID -pkg_openid_homepage = https://github.com/brendonh/erl_openid -pkg_openid_fetch = git -pkg_openid_repo = https://github.com/brendonh/erl_openid -pkg_openid_commit = master - -PACKAGES += openpoker -pkg_openpoker_name = openpoker -pkg_openpoker_description = Genesis Texas hold'em Game Server -pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker -pkg_openpoker_fetch = git -pkg_openpoker_repo = https://github.com/hpyhacking/openpoker -pkg_openpoker_commit = master - -PACKAGES += otpbp -pkg_otpbp_name = otpbp -pkg_otpbp_description = Parse transformer for use new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19) -pkg_otpbp_homepage = https://github.com/Ledest/otpbp -pkg_otpbp_fetch = git -pkg_otpbp_repo = https://github.com/Ledest/otpbp -pkg_otpbp_commit = master - -PACKAGES += pal -pkg_pal_name = pal -pkg_pal_description = Pragmatic Authentication Library -pkg_pal_homepage = https://github.com/manifest/pal -pkg_pal_fetch = git -pkg_pal_repo = https://github.com/manifest/pal -pkg_pal_commit = master - -PACKAGES += parse_trans -pkg_parse_trans_name = parse_trans -pkg_parse_trans_description = Parse transform utilities for Erlang -pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans -pkg_parse_trans_fetch = git -pkg_parse_trans_repo = https://github.com/uwiger/parse_trans -pkg_parse_trans_commit = master - -PACKAGES += parsexml -pkg_parsexml_name = parsexml -pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API -pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml -pkg_parsexml_fetch = git -pkg_parsexml_repo = https://github.com/maxlapshin/parsexml -pkg_parsexml_commit = master - -PACKAGES += partisan -pkg_partisan_name = partisan -pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir. -pkg_partisan_homepage = http://partisan.cloud -pkg_partisan_fetch = git -pkg_partisan_repo = https://github.com/lasp-lang/partisan -pkg_partisan_commit = master - -PACKAGES += pegjs -pkg_pegjs_name = pegjs -pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs -pkg_pegjs_fetch = git -pkg_pegjs_repo = https://github.com/dmitriid/pegjs -pkg_pegjs_commit = master - -PACKAGES += percept2 -pkg_percept2_name = percept2 -pkg_percept2_description = Concurrent profiling tool for Erlang -pkg_percept2_homepage = https://github.com/huiqing/percept2 -pkg_percept2_fetch = git -pkg_percept2_repo = https://github.com/huiqing/percept2 -pkg_percept2_commit = master - -PACKAGES += pgo -pkg_pgo_name = pgo -pkg_pgo_description = Erlang Postgres client and connection pool -pkg_pgo_homepage = https://github.com/erleans/pgo.git -pkg_pgo_fetch = git -pkg_pgo_repo = https://github.com/erleans/pgo.git -pkg_pgo_commit = master - -PACKAGES += pgsql -pkg_pgsql_name = pgsql -pkg_pgsql_description = Erlang PostgreSQL driver -pkg_pgsql_homepage = https://github.com/semiocast/pgsql -pkg_pgsql_fetch = git -pkg_pgsql_repo = https://github.com/semiocast/pgsql -pkg_pgsql_commit = master - -PACKAGES += pkgx -pkg_pkgx_name = pkgx -pkg_pkgx_description = Build .deb packages from Erlang releases -pkg_pkgx_homepage = https://github.com/arjan/pkgx -pkg_pkgx_fetch = git -pkg_pkgx_repo = https://github.com/arjan/pkgx -pkg_pkgx_commit = master - -PACKAGES += pkt -pkg_pkt_name = pkt -pkg_pkt_description = Erlang network protocol library -pkg_pkt_homepage = https://github.com/msantos/pkt -pkg_pkt_fetch = git -pkg_pkt_repo = https://github.com/msantos/pkt -pkg_pkt_commit = master - -PACKAGES += plain_fsm -pkg_plain_fsm_name = plain_fsm -pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. -pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm -pkg_plain_fsm_fetch = git -pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm -pkg_plain_fsm_commit = master - -PACKAGES += plumtree -pkg_plumtree_name = plumtree -pkg_plumtree_description = Epidemic Broadcast Trees -pkg_plumtree_homepage = https://github.com/helium/plumtree -pkg_plumtree_fetch = git -pkg_plumtree_repo = https://github.com/helium/plumtree -pkg_plumtree_commit = master - -PACKAGES += pmod_transform -pkg_pmod_transform_name = pmod_transform -pkg_pmod_transform_description = Parse transform for parameterized modules -pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform -pkg_pmod_transform_fetch = git -pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform -pkg_pmod_transform_commit = master - -PACKAGES += pobox -pkg_pobox_name = pobox -pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang -pkg_pobox_homepage = https://github.com/ferd/pobox -pkg_pobox_fetch = git -pkg_pobox_repo = https://github.com/ferd/pobox -pkg_pobox_commit = master - -PACKAGES += ponos -pkg_ponos_name = ponos -pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang -pkg_ponos_homepage = https://github.com/klarna/ponos -pkg_ponos_fetch = git -pkg_ponos_repo = https://github.com/klarna/ponos -pkg_ponos_commit = master - -PACKAGES += poolboy -pkg_poolboy_name = poolboy -pkg_poolboy_description = A hunky Erlang worker pool factory -pkg_poolboy_homepage = https://github.com/devinus/poolboy -pkg_poolboy_fetch = git -pkg_poolboy_repo = https://github.com/devinus/poolboy -pkg_poolboy_commit = master - -PACKAGES += pooler -pkg_pooler_name = pooler -pkg_pooler_description = An OTP Process Pool Application -pkg_pooler_homepage = https://github.com/seth/pooler -pkg_pooler_fetch = git -pkg_pooler_repo = https://github.com/seth/pooler -pkg_pooler_commit = master - -PACKAGES += 
pqueue -pkg_pqueue_name = pqueue -pkg_pqueue_description = Erlang Priority Queues -pkg_pqueue_homepage = https://github.com/okeuday/pqueue -pkg_pqueue_fetch = git -pkg_pqueue_repo = https://github.com/okeuday/pqueue -pkg_pqueue_commit = master - -PACKAGES += procket -pkg_procket_name = procket -pkg_procket_description = Erlang interface to low level socket operations -pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket -pkg_procket_fetch = git -pkg_procket_repo = https://github.com/msantos/procket -pkg_procket_commit = master - -PACKAGES += prometheus -pkg_prometheus_name = prometheus -pkg_prometheus_description = Prometheus.io client in Erlang -pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl -pkg_prometheus_fetch = git -pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl -pkg_prometheus_commit = master - -PACKAGES += prop -pkg_prop_name = prop -pkg_prop_description = An Erlang code scaffolding and generator system. -pkg_prop_homepage = https://github.com/nuex/prop -pkg_prop_fetch = git -pkg_prop_repo = https://github.com/nuex/prop -pkg_prop_commit = master +PACKAGES += hex_core +pkg_hex_core_name = hex_core +pkg_hex_core_description = Reference implementation of Hex specifications +pkg_hex_core_homepage = https://github.com/hexpm/hex_core +pkg_hex_core_fetch = git +HEX_CORE_GIT ?= https://github.com/hexpm/hex_core +pkg_hex_core_repo = $(HEX_CORE_GIT) +pkg_hex_core_commit = e57b4fb15cde710b3ae09b1d18f148f6999a63cc PACKAGES += proper pkg_proper_name = proper @@ -3185,205 +356,13 @@ pkg_proper_fetch = git pkg_proper_repo = https://github.com/manopapad/proper pkg_proper_commit = master -PACKAGES += props -pkg_props_name = props -pkg_props_description = Property structure library -pkg_props_homepage = https://github.com/greyarea/props -pkg_props_fetch = git -pkg_props_repo = https://github.com/greyarea/props -pkg_props_commit = master - -PACKAGES += protobuffs -pkg_protobuffs_name = protobuffs -pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. -pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs -pkg_protobuffs_fetch = git -pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs -pkg_protobuffs_commit = master - -PACKAGES += psycho -pkg_psycho_name = psycho -pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. -pkg_psycho_homepage = https://github.com/gar1t/psycho -pkg_psycho_fetch = git -pkg_psycho_repo = https://github.com/gar1t/psycho -pkg_psycho_commit = master - -PACKAGES += purity -pkg_purity_name = purity -pkg_purity_description = A side-effect analyzer for Erlang -pkg_purity_homepage = https://github.com/mpitid/purity -pkg_purity_fetch = git -pkg_purity_repo = https://github.com/mpitid/purity -pkg_purity_commit = master - -PACKAGES += push_service -pkg_push_service_name = push_service -pkg_push_service_description = Push service -pkg_push_service_homepage = https://github.com/hairyhum/push_service -pkg_push_service_fetch = git -pkg_push_service_repo = https://github.com/hairyhum/push_service -pkg_push_service_commit = master - -PACKAGES += qdate -pkg_qdate_name = qdate -pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. 
-pkg_qdate_homepage = https://github.com/choptastic/qdate -pkg_qdate_fetch = git -pkg_qdate_repo = https://github.com/choptastic/qdate -pkg_qdate_commit = master - -PACKAGES += qrcode -pkg_qrcode_name = qrcode -pkg_qrcode_description = QR Code encoder in Erlang -pkg_qrcode_homepage = https://github.com/komone/qrcode -pkg_qrcode_fetch = git -pkg_qrcode_repo = https://github.com/komone/qrcode -pkg_qrcode_commit = master - -PACKAGES += quest -pkg_quest_name = quest -pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. -pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest -pkg_quest_fetch = git -pkg_quest_repo = https://github.com/eriksoe/ErlangQuest -pkg_quest_commit = master - -PACKAGES += quickrand -pkg_quickrand_name = quickrand -pkg_quickrand_description = Quick Erlang Random Number Generation -pkg_quickrand_homepage = https://github.com/okeuday/quickrand -pkg_quickrand_fetch = git -pkg_quickrand_repo = https://github.com/okeuday/quickrand -pkg_quickrand_commit = master - -PACKAGES += rabbit -pkg_rabbit_name = rabbit -pkg_rabbit_description = RabbitMQ Server -pkg_rabbit_homepage = https://www.rabbitmq.com/ -pkg_rabbit_fetch = git -pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git -pkg_rabbit_commit = master - -PACKAGES += rabbit_exchange_type_riak -pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak -pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak -pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange -pkg_rabbit_exchange_type_riak_fetch = git -pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange -pkg_rabbit_exchange_type_riak_commit = master - -PACKAGES += rack -pkg_rack_name = rack -pkg_rack_description = Rack handler for erlang -pkg_rack_homepage = https://github.com/erlyvideo/rack -pkg_rack_fetch = git -pkg_rack_repo = https://github.com/erlyvideo/rack -pkg_rack_commit = master - -PACKAGES += radierl -pkg_radierl_name = radierl -pkg_radierl_description = RADIUS protocol stack implemented in Erlang. -pkg_radierl_homepage = https://github.com/vances/radierl -pkg_radierl_fetch = git -pkg_radierl_repo = https://github.com/vances/radierl -pkg_radierl_commit = master - -PACKAGES += rafter -pkg_rafter_name = rafter -pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol -pkg_rafter_homepage = https://github.com/andrewjstone/rafter -pkg_rafter_fetch = git -pkg_rafter_repo = https://github.com/andrewjstone/rafter -pkg_rafter_commit = master - PACKAGES += ranch pkg_ranch_name = ranch pkg_ranch_description = Socket acceptor pool for TCP protocols. pkg_ranch_homepage = http://ninenines.eu pkg_ranch_fetch = git pkg_ranch_repo = https://github.com/ninenines/ranch -pkg_ranch_commit = 1.2.1 - -PACKAGES += rbeacon -pkg_rbeacon_name = rbeacon -pkg_rbeacon_description = LAN discovery and presence in Erlang. -pkg_rbeacon_homepage = https://github.com/refuge/rbeacon -pkg_rbeacon_fetch = git -pkg_rbeacon_repo = https://github.com/refuge/rbeacon -pkg_rbeacon_commit = master - -PACKAGES += rebar -pkg_rebar_name = rebar -pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. 
-pkg_rebar_homepage = http://www.rebar3.org -pkg_rebar_fetch = git -pkg_rebar_repo = https://github.com/rebar/rebar3 -pkg_rebar_commit = master - -PACKAGES += rebus -pkg_rebus_name = rebus -pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. -pkg_rebus_homepage = https://github.com/olle/rebus -pkg_rebus_fetch = git -pkg_rebus_repo = https://github.com/olle/rebus -pkg_rebus_commit = master - -PACKAGES += rec2json -pkg_rec2json_name = rec2json -pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. -pkg_rec2json_homepage = https://github.com/lordnull/rec2json -pkg_rec2json_fetch = git -pkg_rec2json_repo = https://github.com/lordnull/rec2json -pkg_rec2json_commit = master - -PACKAGES += recon -pkg_recon_name = recon -pkg_recon_description = Collection of functions and scripts to debug Erlang in production. -pkg_recon_homepage = https://github.com/ferd/recon -pkg_recon_fetch = git -pkg_recon_repo = https://github.com/ferd/recon -pkg_recon_commit = master - -PACKAGES += record_info -pkg_record_info_name = record_info -pkg_record_info_description = Convert between record and proplist -pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info -pkg_record_info_fetch = git -pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info -pkg_record_info_commit = master - -PACKAGES += redgrid -pkg_redgrid_name = redgrid -pkg_redgrid_description = automatic Erlang node discovery via redis -pkg_redgrid_homepage = https://github.com/jkvor/redgrid -pkg_redgrid_fetch = git -pkg_redgrid_repo = https://github.com/jkvor/redgrid -pkg_redgrid_commit = master - -PACKAGES += redo -pkg_redo_name = redo -pkg_redo_description = pipelined erlang redis client -pkg_redo_homepage = https://github.com/jkvor/redo -pkg_redo_fetch = git -pkg_redo_repo = https://github.com/jkvor/redo -pkg_redo_commit = master - -PACKAGES += reload_mk -pkg_reload_mk_name = reload_mk -pkg_reload_mk_description = Live reload plugin for erlang.mk. -pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk -pkg_reload_mk_fetch = git -pkg_reload_mk_repo = https://github.com/bullno1/reload.mk -pkg_reload_mk_commit = master - -PACKAGES += reltool_util -pkg_reltool_util_name = reltool_util -pkg_reltool_util_description = Erlang reltool utility functionality application -pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util -pkg_reltool_util_fetch = git -pkg_reltool_util_repo = https://github.com/okeuday/reltool_util -pkg_reltool_util_commit = master +pkg_ranch_commit = master PACKAGES += relx pkg_relx_name = relx @@ -3391,599 +370,7 @@ pkg_relx_description = Sane, simple release creation for Erlang pkg_relx_homepage = https://github.com/erlware/relx pkg_relx_fetch = git pkg_relx_repo = https://github.com/erlware/relx -pkg_relx_commit = master - -PACKAGES += resource_discovery -pkg_resource_discovery_name = resource_discovery -pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. 
-pkg_resource_discovery_homepage = http://erlware.org/ -pkg_resource_discovery_fetch = git -pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery -pkg_resource_discovery_commit = master - -PACKAGES += restc -pkg_restc_name = restc -pkg_restc_description = Erlang Rest Client -pkg_restc_homepage = https://github.com/kivra/restclient -pkg_restc_fetch = git -pkg_restc_repo = https://github.com/kivra/restclient -pkg_restc_commit = master - -PACKAGES += rfc4627_jsonrpc -pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc -pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. -pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 -pkg_rfc4627_jsonrpc_fetch = git -pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 -pkg_rfc4627_jsonrpc_commit = master - -PACKAGES += riak_control -pkg_riak_control_name = riak_control -pkg_riak_control_description = Webmachine-based administration interface for Riak. -pkg_riak_control_homepage = https://github.com/basho/riak_control -pkg_riak_control_fetch = git -pkg_riak_control_repo = https://github.com/basho/riak_control -pkg_riak_control_commit = master - -PACKAGES += riak_core -pkg_riak_core_name = riak_core -pkg_riak_core_description = Distributed systems infrastructure used by Riak. -pkg_riak_core_homepage = https://github.com/basho/riak_core -pkg_riak_core_fetch = git -pkg_riak_core_repo = https://github.com/basho/riak_core -pkg_riak_core_commit = master - -PACKAGES += riak_dt -pkg_riak_dt_name = riak_dt -pkg_riak_dt_description = Convergent replicated datatypes in Erlang -pkg_riak_dt_homepage = https://github.com/basho/riak_dt -pkg_riak_dt_fetch = git -pkg_riak_dt_repo = https://github.com/basho/riak_dt -pkg_riak_dt_commit = master - -PACKAGES += riak_ensemble -pkg_riak_ensemble_name = riak_ensemble -pkg_riak_ensemble_description = Multi-Paxos framework in Erlang -pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble -pkg_riak_ensemble_fetch = git -pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble -pkg_riak_ensemble_commit = master - -PACKAGES += riak_kv -pkg_riak_kv_name = riak_kv -pkg_riak_kv_description = Riak Key/Value Store -pkg_riak_kv_homepage = https://github.com/basho/riak_kv -pkg_riak_kv_fetch = git -pkg_riak_kv_repo = https://github.com/basho/riak_kv -pkg_riak_kv_commit = master - -PACKAGES += riak_pg -pkg_riak_pg_name = riak_pg -pkg_riak_pg_description = Distributed process groups with riak_core. 
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg -pkg_riak_pg_fetch = git -pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg -pkg_riak_pg_commit = master - -PACKAGES += riak_pipe -pkg_riak_pipe_name = riak_pipe -pkg_riak_pipe_description = Riak Pipelines -pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe -pkg_riak_pipe_fetch = git -pkg_riak_pipe_repo = https://github.com/basho/riak_pipe -pkg_riak_pipe_commit = master - -PACKAGES += riak_sysmon -pkg_riak_sysmon_name = riak_sysmon -pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages -pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon -pkg_riak_sysmon_fetch = git -pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon -pkg_riak_sysmon_commit = master - -PACKAGES += riak_test -pkg_riak_test_name = riak_test -pkg_riak_test_description = I'm in your cluster, testing your riaks -pkg_riak_test_homepage = https://github.com/basho/riak_test -pkg_riak_test_fetch = git -pkg_riak_test_repo = https://github.com/basho/riak_test -pkg_riak_test_commit = master - -PACKAGES += riakc -pkg_riakc_name = riakc -pkg_riakc_description = Erlang clients for Riak. -pkg_riakc_homepage = https://github.com/basho/riak-erlang-client -pkg_riakc_fetch = git -pkg_riakc_repo = https://github.com/basho/riak-erlang-client -pkg_riakc_commit = master - -PACKAGES += riakhttpc -pkg_riakhttpc_name = riakhttpc -pkg_riakhttpc_description = Riak Erlang client using the HTTP interface -pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client -pkg_riakhttpc_fetch = git -pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client -pkg_riakhttpc_commit = master - -PACKAGES += riaknostic -pkg_riaknostic_name = riaknostic -pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap -pkg_riaknostic_homepage = https://github.com/basho/riaknostic -pkg_riaknostic_fetch = git -pkg_riaknostic_repo = https://github.com/basho/riaknostic -pkg_riaknostic_commit = master - -PACKAGES += riakpool -pkg_riakpool_name = riakpool -pkg_riakpool_description = erlang riak client pool -pkg_riakpool_homepage = https://github.com/dweldon/riakpool -pkg_riakpool_fetch = git -pkg_riakpool_repo = https://github.com/dweldon/riakpool -pkg_riakpool_commit = master - -PACKAGES += rivus_cep -pkg_rivus_cep_name = rivus_cep -pkg_rivus_cep_description = Complex event processing in Erlang -pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep -pkg_rivus_cep_fetch = git -pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep -pkg_rivus_cep_commit = master - -PACKAGES += rlimit -pkg_rlimit_name = rlimit -pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent -pkg_rlimit_homepage = https://github.com/jlouis/rlimit -pkg_rlimit_fetch = git -pkg_rlimit_repo = https://github.com/jlouis/rlimit -pkg_rlimit_commit = master - -PACKAGES += rust_mk -pkg_rust_mk_name = rust_mk -pkg_rust_mk_description = Build Rust crates in an Erlang application -pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk -pkg_rust_mk_fetch = git -pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk -pkg_rust_mk_commit = master - -PACKAGES += safetyvalve -pkg_safetyvalve_name = safetyvalve -pkg_safetyvalve_description = A safety valve for your erlang node -pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve -pkg_safetyvalve_fetch = git -pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve -pkg_safetyvalve_commit = master - -PACKAGES += 
seestar -pkg_seestar_name = seestar -pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol -pkg_seestar_homepage = https://github.com/iamaleksey/seestar -pkg_seestar_fetch = git -pkg_seestar_repo = https://github.com/iamaleksey/seestar -pkg_seestar_commit = master - -PACKAGES += service -pkg_service_name = service -pkg_service_description = A minimal Erlang behavior for creating CloudI internal services -pkg_service_homepage = http://cloudi.org/ -pkg_service_fetch = git -pkg_service_repo = https://github.com/CloudI/service -pkg_service_commit = master - -PACKAGES += setup -pkg_setup_name = setup -pkg_setup_description = Generic setup utility for Erlang-based systems -pkg_setup_homepage = https://github.com/uwiger/setup -pkg_setup_fetch = git -pkg_setup_repo = https://github.com/uwiger/setup -pkg_setup_commit = master - -PACKAGES += sext -pkg_sext_name = sext -pkg_sext_description = Sortable Erlang Term Serialization -pkg_sext_homepage = https://github.com/uwiger/sext -pkg_sext_fetch = git -pkg_sext_repo = https://github.com/uwiger/sext -pkg_sext_commit = master - -PACKAGES += sfmt -pkg_sfmt_name = sfmt -pkg_sfmt_description = SFMT pseudo random number generator for Erlang. -pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang -pkg_sfmt_fetch = git -pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang -pkg_sfmt_commit = master - -PACKAGES += sgte -pkg_sgte_name = sgte -pkg_sgte_description = A simple Erlang Template Engine -pkg_sgte_homepage = https://github.com/filippo/sgte -pkg_sgte_fetch = git -pkg_sgte_repo = https://github.com/filippo/sgte -pkg_sgte_commit = master - -PACKAGES += sheriff -pkg_sheriff_name = sheriff -pkg_sheriff_description = Parse transform for type based validation. -pkg_sheriff_homepage = http://ninenines.eu -pkg_sheriff_fetch = git -pkg_sheriff_repo = https://github.com/extend/sheriff -pkg_sheriff_commit = master - -PACKAGES += shotgun -pkg_shotgun_name = shotgun -pkg_shotgun_description = better than just a gun -pkg_shotgun_homepage = https://github.com/inaka/shotgun -pkg_shotgun_fetch = git -pkg_shotgun_repo = https://github.com/inaka/shotgun -pkg_shotgun_commit = master - -PACKAGES += sidejob -pkg_sidejob_name = sidejob -pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang -pkg_sidejob_homepage = https://github.com/basho/sidejob -pkg_sidejob_fetch = git -pkg_sidejob_repo = https://github.com/basho/sidejob -pkg_sidejob_commit = master - -PACKAGES += sieve -pkg_sieve_name = sieve -pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang -pkg_sieve_homepage = https://github.com/benoitc/sieve -pkg_sieve_fetch = git -pkg_sieve_repo = https://github.com/benoitc/sieve -pkg_sieve_commit = master - -PACKAGES += sighandler -pkg_sighandler_name = sighandler -pkg_sighandler_description = Handle UNIX signals in Er lang -pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler -pkg_sighandler_fetch = git -pkg_sighandler_repo = https://github.com/jkingsbery/sighandler -pkg_sighandler_commit = master - -PACKAGES += simhash -pkg_simhash_name = simhash -pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. -pkg_simhash_homepage = https://github.com/ferd/simhash -pkg_simhash_fetch = git -pkg_simhash_repo = https://github.com/ferd/simhash -pkg_simhash_commit = master - -PACKAGES += simple_bridge -pkg_simple_bridge_name = simple_bridge -pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge -pkg_simple_bridge_fetch = git -pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge -pkg_simple_bridge_commit = master - -PACKAGES += simple_oauth2 -pkg_simple_oauth2_name = simple_oauth2 -pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) -pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 -pkg_simple_oauth2_fetch = git -pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 -pkg_simple_oauth2_commit = master - -PACKAGES += skel -pkg_skel_name = skel -pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang -pkg_skel_homepage = https://github.com/ParaPhrase/skel -pkg_skel_fetch = git -pkg_skel_repo = https://github.com/ParaPhrase/skel -pkg_skel_commit = master - -PACKAGES += slack -pkg_slack_name = slack -pkg_slack_description = Minimal slack notification OTP library. -pkg_slack_homepage = https://github.com/DonBranson/slack -pkg_slack_fetch = git -pkg_slack_repo = https://github.com/DonBranson/slack.git -pkg_slack_commit = master - -PACKAGES += smother -pkg_smother_name = smother -pkg_smother_description = Extended code coverage metrics for Erlang. -pkg_smother_homepage = https://ramsay-t.github.io/Smother/ -pkg_smother_fetch = git -pkg_smother_repo = https://github.com/ramsay-t/Smother -pkg_smother_commit = master - -PACKAGES += snappyer -pkg_snappyer_name = snappyer -pkg_snappyer_description = Snappy as nif for Erlang -pkg_snappyer_homepage = https://github.com/zmstone/snappyer -pkg_snappyer_fetch = git -pkg_snappyer_repo = https://github.com/zmstone/snappyer.git -pkg_snappyer_commit = master - -PACKAGES += social -pkg_social_name = social -pkg_social_description = Cowboy handler for social login via OAuth2 providers -pkg_social_homepage = https://github.com/dvv/social -pkg_social_fetch = git -pkg_social_repo = https://github.com/dvv/social -pkg_social_commit = master - -PACKAGES += spapi_router -pkg_spapi_router_name = spapi_router -pkg_spapi_router_description = Partially-connected Erlang clustering -pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router -pkg_spapi_router_fetch = git -pkg_spapi_router_repo = https://github.com/spilgames/spapi-router -pkg_spapi_router_commit = master - -PACKAGES += sqerl -pkg_sqerl_name = sqerl -pkg_sqerl_description = An Erlang-flavoured SQL DSL -pkg_sqerl_homepage = https://github.com/hairyhum/sqerl -pkg_sqerl_fetch = git -pkg_sqerl_repo = https://github.com/hairyhum/sqerl -pkg_sqerl_commit = master - -PACKAGES += srly -pkg_srly_name = srly -pkg_srly_description = Native Erlang Unix serial interface -pkg_srly_homepage = https://github.com/msantos/srly -pkg_srly_fetch = git -pkg_srly_repo = https://github.com/msantos/srly -pkg_srly_commit = master - -PACKAGES += sshrpc -pkg_sshrpc_name = sshrpc -pkg_sshrpc_description = Erlang SSH RPC module (experimental) -pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc -pkg_sshrpc_fetch = git -pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc -pkg_sshrpc_commit = master - -PACKAGES += stable -pkg_stable_name = stable -pkg_stable_description = Library of assorted helpers for Cowboy web server. 
-pkg_stable_homepage = https://github.com/dvv/stable -pkg_stable_fetch = git -pkg_stable_repo = https://github.com/dvv/stable -pkg_stable_commit = master - -PACKAGES += statebox -pkg_statebox_name = statebox -pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak. -pkg_statebox_homepage = https://github.com/mochi/statebox -pkg_statebox_fetch = git -pkg_statebox_repo = https://github.com/mochi/statebox -pkg_statebox_commit = master - -PACKAGES += statebox_riak -pkg_statebox_riak_name = statebox_riak -pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. -pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak -pkg_statebox_riak_fetch = git -pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak -pkg_statebox_riak_commit = master - -PACKAGES += statman -pkg_statman_name = statman -pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM -pkg_statman_homepage = https://github.com/knutin/statman -pkg_statman_fetch = git -pkg_statman_repo = https://github.com/knutin/statman -pkg_statman_commit = master - -PACKAGES += statsderl -pkg_statsderl_name = statsderl -pkg_statsderl_description = StatsD client (erlang) -pkg_statsderl_homepage = https://github.com/lpgauth/statsderl -pkg_statsderl_fetch = git -pkg_statsderl_repo = https://github.com/lpgauth/statsderl -pkg_statsderl_commit = master - -PACKAGES += stdinout_pool -pkg_stdinout_pool_name = stdinout_pool -pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. -pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool -pkg_stdinout_pool_fetch = git -pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool -pkg_stdinout_pool_commit = master - -PACKAGES += stockdb -pkg_stockdb_name = stockdb -pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang -pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb -pkg_stockdb_fetch = git -pkg_stockdb_repo = https://github.com/maxlapshin/stockdb -pkg_stockdb_commit = master - -PACKAGES += stripe -pkg_stripe_name = stripe -pkg_stripe_description = Erlang interface to the stripe.com API -pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang -pkg_stripe_fetch = git -pkg_stripe_repo = https://github.com/mattsta/stripe-erlang -pkg_stripe_commit = v1 - -PACKAGES += subproc -pkg_subproc_name = subproc -pkg_subproc_description = unix subprocess manager with {active,once|false} modes -pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc -pkg_subproc_fetch = git -pkg_subproc_repo = https://github.com/dozzie/subproc -pkg_subproc_commit = v0.1.0 - -PACKAGES += supervisor3 -pkg_supervisor3_name = supervisor3 -pkg_supervisor3_description = OTP supervisor with additional strategies -pkg_supervisor3_homepage = https://github.com/klarna/supervisor3 -pkg_supervisor3_fetch = git -pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git -pkg_supervisor3_commit = master - -PACKAGES += surrogate -pkg_surrogate_name = surrogate -pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. 
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate -pkg_surrogate_fetch = git -pkg_surrogate_repo = https://github.com/skruger/Surrogate -pkg_surrogate_commit = master - -PACKAGES += swab -pkg_swab_name = swab -pkg_swab_description = General purpose buffer handling module -pkg_swab_homepage = https://github.com/crownedgrouse/swab -pkg_swab_fetch = git -pkg_swab_repo = https://github.com/crownedgrouse/swab -pkg_swab_commit = master - -PACKAGES += swarm -pkg_swarm_name = swarm -pkg_swarm_description = Fast and simple acceptor pool for Erlang -pkg_swarm_homepage = https://github.com/jeremey/swarm -pkg_swarm_fetch = git -pkg_swarm_repo = https://github.com/jeremey/swarm -pkg_swarm_commit = master - -PACKAGES += switchboard -pkg_switchboard_name = switchboard -pkg_switchboard_description = A framework for processing email using worker plugins. -pkg_switchboard_homepage = https://github.com/thusfresh/switchboard -pkg_switchboard_fetch = git -pkg_switchboard_repo = https://github.com/thusfresh/switchboard -pkg_switchboard_commit = master - -PACKAGES += syn -pkg_syn_name = syn -pkg_syn_description = A global Process Registry and Process Group manager for Erlang. -pkg_syn_homepage = https://github.com/ostinelli/syn -pkg_syn_fetch = git -pkg_syn_repo = https://github.com/ostinelli/syn -pkg_syn_commit = master - -PACKAGES += sync -pkg_sync_name = sync -pkg_sync_description = On-the-fly recompiling and reloading in Erlang. -pkg_sync_homepage = https://github.com/rustyio/sync -pkg_sync_fetch = git -pkg_sync_repo = https://github.com/rustyio/sync -pkg_sync_commit = master - -PACKAGES += syntaxerl -pkg_syntaxerl_name = syntaxerl -pkg_syntaxerl_description = Syntax checker for Erlang -pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl -pkg_syntaxerl_fetch = git -pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl -pkg_syntaxerl_commit = master - -PACKAGES += syslog -pkg_syslog_name = syslog -pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) -pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog -pkg_syslog_fetch = git -pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog -pkg_syslog_commit = master - -PACKAGES += taskforce -pkg_taskforce_name = taskforce -pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. -pkg_taskforce_homepage = https://github.com/g-andrade/taskforce -pkg_taskforce_fetch = git -pkg_taskforce_repo = https://github.com/g-andrade/taskforce -pkg_taskforce_commit = master - -PACKAGES += tddreloader -pkg_tddreloader_name = tddreloader -pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes -pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader -pkg_tddreloader_fetch = git -pkg_tddreloader_repo = https://github.com/version2beta/tddreloader -pkg_tddreloader_commit = master - -PACKAGES += tempo -pkg_tempo_name = tempo -pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. -pkg_tempo_homepage = https://github.com/selectel/tempo -pkg_tempo_fetch = git -pkg_tempo_repo = https://github.com/selectel/tempo -pkg_tempo_commit = master - -PACKAGES += ticktick -pkg_ticktick_name = ticktick -pkg_ticktick_description = Ticktick is an id generator for message service. 
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick -pkg_ticktick_fetch = git -pkg_ticktick_repo = https://github.com/ericliang/ticktick -pkg_ticktick_commit = master - -PACKAGES += tinymq -pkg_tinymq_name = tinymq -pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue -pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq -pkg_tinymq_fetch = git -pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq -pkg_tinymq_commit = master - -PACKAGES += tinymt -pkg_tinymt_name = tinymt -pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. -pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang -pkg_tinymt_fetch = git -pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang -pkg_tinymt_commit = master - -PACKAGES += tirerl -pkg_tirerl_name = tirerl -pkg_tirerl_description = Erlang interface to Elastic Search -pkg_tirerl_homepage = https://github.com/inaka/tirerl -pkg_tirerl_fetch = git -pkg_tirerl_repo = https://github.com/inaka/tirerl -pkg_tirerl_commit = master - -PACKAGES += toml -pkg_toml_name = toml -pkg_toml_description = TOML (0.4.0) config parser -pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML -pkg_toml_fetch = git -pkg_toml_repo = https://github.com/dozzie/toml -pkg_toml_commit = v0.2.0 - -PACKAGES += traffic_tools -pkg_traffic_tools_name = traffic_tools -pkg_traffic_tools_description = Simple traffic limiting library -pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools -pkg_traffic_tools_fetch = git -pkg_traffic_tools_repo = https://github.com/systra/traffic_tools -pkg_traffic_tools_commit = master - -PACKAGES += trails -pkg_trails_name = trails -pkg_trails_description = A couple of improvements over Cowboy Routes -pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ -pkg_trails_fetch = git -pkg_trails_repo = https://github.com/inaka/cowboy-trails -pkg_trails_commit = master - -PACKAGES += trane -pkg_trane_name = trane -pkg_trane_description = SAX style broken HTML parser in Erlang -pkg_trane_homepage = https://github.com/massemanet/trane -pkg_trane_fetch = git -pkg_trane_repo = https://github.com/massemanet/trane -pkg_trane_commit = master - -PACKAGES += transit -pkg_transit_name = transit -pkg_transit_description = transit format for erlang -pkg_transit_homepage = https://github.com/isaiah/transit-erlang -pkg_transit_fetch = git -pkg_transit_repo = https://github.com/isaiah/transit-erlang -pkg_transit_commit = master - -PACKAGES += trie -pkg_trie_name = trie -pkg_trie_description = Erlang Trie Implementation -pkg_trie_homepage = https://github.com/okeuday/trie -pkg_trie_fetch = git -pkg_trie_repo = https://github.com/okeuday/trie -pkg_trie_commit = master +pkg_relx_commit = main PACKAGES += triq pkg_triq_name = triq @@ -3993,294 +380,6 @@ pkg_triq_fetch = git pkg_triq_repo = https://gitlab.com/triq/triq.git pkg_triq_commit = master -PACKAGES += tunctl -pkg_tunctl_name = tunctl -pkg_tunctl_description = Erlang TUN/TAP interface -pkg_tunctl_homepage = https://github.com/msantos/tunctl -pkg_tunctl_fetch = git -pkg_tunctl_repo = https://github.com/msantos/tunctl -pkg_tunctl_commit = master - -PACKAGES += twerl -pkg_twerl_name = twerl -pkg_twerl_description = Erlang client for the Twitter Streaming API -pkg_twerl_homepage = https://github.com/lucaspiller/twerl -pkg_twerl_fetch = git -pkg_twerl_repo = https://github.com/lucaspiller/twerl -pkg_twerl_commit = oauth - -PACKAGES += twitter_erlang -pkg_twitter_erlang_name = twitter_erlang -pkg_twitter_erlang_description = An Erlang twitter client 
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter -pkg_twitter_erlang_fetch = git -pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter -pkg_twitter_erlang_commit = master - -PACKAGES += ucol_nif -pkg_ucol_nif_name = ucol_nif -pkg_ucol_nif_description = ICU based collation Erlang module -pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif -pkg_ucol_nif_fetch = git -pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif -pkg_ucol_nif_commit = master - -PACKAGES += unicorn -pkg_unicorn_name = unicorn -pkg_unicorn_description = Generic configuration server -pkg_unicorn_homepage = https://github.com/shizzard/unicorn -pkg_unicorn_fetch = git -pkg_unicorn_repo = https://github.com/shizzard/unicorn -pkg_unicorn_commit = master - -PACKAGES += unsplit -pkg_unsplit_name = unsplit -pkg_unsplit_description = Resolves conflicts in Mnesia after network splits -pkg_unsplit_homepage = https://github.com/uwiger/unsplit -pkg_unsplit_fetch = git -pkg_unsplit_repo = https://github.com/uwiger/unsplit -pkg_unsplit_commit = master - -PACKAGES += uuid -pkg_uuid_name = uuid -pkg_uuid_description = Erlang UUID Implementation -pkg_uuid_homepage = https://github.com/okeuday/uuid -pkg_uuid_fetch = git -pkg_uuid_repo = https://github.com/okeuday/uuid -pkg_uuid_commit = master - -PACKAGES += ux -pkg_ux_name = ux -pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) -pkg_ux_homepage = https://github.com/erlang-unicode/ux -pkg_ux_fetch = git -pkg_ux_repo = https://github.com/erlang-unicode/ux -pkg_ux_commit = master - -PACKAGES += vert -pkg_vert_name = vert -pkg_vert_description = Erlang binding to libvirt virtualization API -pkg_vert_homepage = https://github.com/msantos/erlang-libvirt -pkg_vert_fetch = git -pkg_vert_repo = https://github.com/msantos/erlang-libvirt -pkg_vert_commit = master - -PACKAGES += verx -pkg_verx_name = verx -pkg_verx_description = Erlang implementation of the libvirtd remote protocol -pkg_verx_homepage = https://github.com/msantos/verx -pkg_verx_fetch = git -pkg_verx_repo = https://github.com/msantos/verx -pkg_verx_commit = master - -PACKAGES += vmq_acl -pkg_vmq_acl_name = vmq_acl -pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker -pkg_vmq_acl_homepage = https://verne.mq/ -pkg_vmq_acl_fetch = git -pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl -pkg_vmq_acl_commit = master - -PACKAGES += vmq_bridge -pkg_vmq_bridge_name = vmq_bridge -pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker -pkg_vmq_bridge_homepage = https://verne.mq/ -pkg_vmq_bridge_fetch = git -pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge -pkg_vmq_bridge_commit = master - -PACKAGES += vmq_graphite -pkg_vmq_graphite_name = vmq_graphite -pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker -pkg_vmq_graphite_homepage = https://verne.mq/ -pkg_vmq_graphite_fetch = git -pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite -pkg_vmq_graphite_commit = master - -PACKAGES += vmq_passwd -pkg_vmq_passwd_name = vmq_passwd -pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker -pkg_vmq_passwd_homepage = https://verne.mq/ -pkg_vmq_passwd_fetch = git -pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd -pkg_vmq_passwd_commit = master - -PACKAGES += vmq_server -pkg_vmq_server_name = vmq_server -pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker -pkg_vmq_server_homepage = https://verne.mq/ 
-pkg_vmq_server_fetch = git -pkg_vmq_server_repo = https://github.com/erlio/vmq_server -pkg_vmq_server_commit = master - -PACKAGES += vmq_snmp -pkg_vmq_snmp_name = vmq_snmp -pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker -pkg_vmq_snmp_homepage = https://verne.mq/ -pkg_vmq_snmp_fetch = git -pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp -pkg_vmq_snmp_commit = master - -PACKAGES += vmq_systree -pkg_vmq_systree_name = vmq_systree -pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker -pkg_vmq_systree_homepage = https://verne.mq/ -pkg_vmq_systree_fetch = git -pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree -pkg_vmq_systree_commit = master - -PACKAGES += vmstats -pkg_vmstats_name = vmstats -pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. -pkg_vmstats_homepage = https://github.com/ferd/vmstats -pkg_vmstats_fetch = git -pkg_vmstats_repo = https://github.com/ferd/vmstats -pkg_vmstats_commit = master - -PACKAGES += walrus -pkg_walrus_name = walrus -pkg_walrus_description = Walrus - Mustache-like Templating -pkg_walrus_homepage = https://github.com/devinus/walrus -pkg_walrus_fetch = git -pkg_walrus_repo = https://github.com/devinus/walrus -pkg_walrus_commit = master - -PACKAGES += webmachine -pkg_webmachine_name = webmachine -pkg_webmachine_description = A REST-based system for building web applications. -pkg_webmachine_homepage = https://github.com/basho/webmachine -pkg_webmachine_fetch = git -pkg_webmachine_repo = https://github.com/basho/webmachine -pkg_webmachine_commit = master - -PACKAGES += websocket_client -pkg_websocket_client_name = websocket_client -pkg_websocket_client_description = Erlang websocket client (ws and wss supported) -pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client -pkg_websocket_client_fetch = git -pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client -pkg_websocket_client_commit = master - -PACKAGES += worker_pool -pkg_worker_pool_name = worker_pool -pkg_worker_pool_description = a simple erlang worker pool -pkg_worker_pool_homepage = https://github.com/inaka/worker_pool -pkg_worker_pool_fetch = git -pkg_worker_pool_repo = https://github.com/inaka/worker_pool -pkg_worker_pool_commit = master - -PACKAGES += wrangler -pkg_wrangler_name = wrangler -pkg_wrangler_description = Import of the Wrangler svn repository. 
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html -pkg_wrangler_fetch = git -pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler -pkg_wrangler_commit = master - -PACKAGES += wsock -pkg_wsock_name = wsock -pkg_wsock_description = Erlang library to build WebSocket clients and servers -pkg_wsock_homepage = https://github.com/madtrick/wsock -pkg_wsock_fetch = git -pkg_wsock_repo = https://github.com/madtrick/wsock -pkg_wsock_commit = master - -PACKAGES += xhttpc -pkg_xhttpc_name = xhttpc -pkg_xhttpc_description = Extensible HTTP Client for Erlang -pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc -pkg_xhttpc_fetch = git -pkg_xhttpc_repo = https://github.com/seriyps/xhttpc -pkg_xhttpc_commit = master - -PACKAGES += xref_runner -pkg_xref_runner_name = xref_runner -pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) -pkg_xref_runner_homepage = https://github.com/inaka/xref_runner -pkg_xref_runner_fetch = git -pkg_xref_runner_repo = https://github.com/inaka/xref_runner -pkg_xref_runner_commit = master - -PACKAGES += yamerl -pkg_yamerl_name = yamerl -pkg_yamerl_description = YAML 1.2 parser in pure Erlang -pkg_yamerl_homepage = https://github.com/yakaz/yamerl -pkg_yamerl_fetch = git -pkg_yamerl_repo = https://github.com/yakaz/yamerl -pkg_yamerl_commit = master - -PACKAGES += yamler -pkg_yamler_name = yamler -pkg_yamler_description = libyaml-based yaml loader for Erlang -pkg_yamler_homepage = https://github.com/goertzenator/yamler -pkg_yamler_fetch = git -pkg_yamler_repo = https://github.com/goertzenator/yamler -pkg_yamler_commit = master - -PACKAGES += yaws -pkg_yaws_name = yaws -pkg_yaws_description = Yaws webserver -pkg_yaws_homepage = http://yaws.hyber.org -pkg_yaws_fetch = git -pkg_yaws_repo = https://github.com/klacke/yaws -pkg_yaws_commit = master - -PACKAGES += zab_engine -pkg_zab_engine_name = zab_engine -pkg_zab_engine_description = zab propotocol implement by erlang -pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine -pkg_zab_engine_fetch = git -pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine -pkg_zab_engine_commit = master - -PACKAGES += zabbix_sender -pkg_zabbix_sender_name = zabbix_sender -pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang -pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender -pkg_zabbix_sender_fetch = git -pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git -pkg_zabbix_sender_commit = master - -PACKAGES += zeta -pkg_zeta_name = zeta -pkg_zeta_description = HTTP access log parser in Erlang -pkg_zeta_homepage = https://github.com/s1n4/zeta -pkg_zeta_fetch = git -pkg_zeta_repo = https://github.com/s1n4/zeta -pkg_zeta_commit = master - -PACKAGES += zippers -pkg_zippers_name = zippers -pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers -pkg_zippers_homepage = https://github.com/ferd/zippers -pkg_zippers_fetch = git -pkg_zippers_repo = https://github.com/ferd/zippers -pkg_zippers_commit = master - -PACKAGES += zlists -pkg_zlists_name = zlists -pkg_zlists_description = Erlang lazy lists library. 
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists -pkg_zlists_fetch = git -pkg_zlists_repo = https://github.com/vjache/erlang-zlists -pkg_zlists_commit = master - -PACKAGES += zraft_lib -pkg_zraft_lib_name = zraft_lib -pkg_zraft_lib_description = Erlang raft consensus protocol implementation -pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib -pkg_zraft_lib_fetch = git -pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib -pkg_zraft_lib_commit = master - -PACKAGES += zucchini -pkg_zucchini_name = zucchini -pkg_zucchini_description = An Erlang INI parser -pkg_zucchini_homepage = https://github.com/devinus/zucchini -pkg_zucchini_fetch = git -pkg_zucchini_repo = https://github.com/devinus/zucchini -pkg_zucchini_commit = master - # Copyright (c) 2015-2016, Loïc Hoguin <[email protected]> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -4288,7 +387,7 @@ pkg_zucchini_commit = master define pkg_print $(verbose) printf "%s\n" \ - $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + $(if $(call core_eq,$1,$(pkg_$(1)_name)),,"Pkg name: $1") \ "App name: $(pkg_$(1)_name)" \ "Description: $(pkg_$(1)_description)" \ "Home page: $(pkg_$(1)_homepage)" \ @@ -4302,10 +401,10 @@ endef search: ifdef q $(foreach p,$(PACKAGES), \ - $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ - $(call pkg_print,$(p)))) + $(if $(findstring $(call core_lc,$q),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$p))) else - $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) + $(foreach p,$(PACKAGES),$(call pkg_print,$p)) endif # Copyright (c) 2013-2016, Loïc Hoguin <[email protected]> @@ -4331,19 +430,35 @@ export DEPS_DIR REBAR_DEPS_DIR = $(DEPS_DIR) export REBAR_DEPS_DIR -REBAR_GIT ?= https://github.com/rebar/rebar -REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01 +# When testing Erlang.mk and updating these, make sure +# to delete test/test_rebar_git before running tests again. +REBAR3_GIT ?= https://github.com/erlang/rebar3 +REBAR3_COMMIT ?= bde4b54248d16280b2c70a244aca3bb7566e2033 # 3.23.0 + +CACHE_DEPS ?= 0 + +CACHE_DIR ?= $(if $(XDG_CACHE_HOME),$(XDG_CACHE_HOME),$(HOME)/.cache)/erlang.mk +export CACHE_DIR + +HEX_CONFIG ?= + +define hex_config.erl + begin + Config0 = hex_core:default_config(), + Config0$(HEX_CONFIG) + end +endef # External "early" plugins (see core/plugins.mk for regular plugins). # They both use the core_dep_plugin macro. define core_dep_plugin -ifeq ($(2),$(PROJECT)) --include $$(patsubst $(PROJECT)/%,%,$(1)) +ifeq ($2,$(PROJECT)) +-include $$(patsubst $(PROJECT)/%,%,$1) else --include $(DEPS_DIR)/$(1) +-include $(DEPS_DIR)/$1 -$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +$(DEPS_DIR)/$1: $(DEPS_DIR)/$2 ; endif endef @@ -4356,44 +471,42 @@ $(foreach p,$(DEP_EARLY_PLUGINS),\ # Query functions. 
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1))) -_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail)) +query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$1)) +_qfm_dep = $(if $(dep_fetch_$(1)),$1,fail) _qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail) -query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +query_name = $(if $(dep_$(1)),$1,$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$1)) -query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1))) -_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1))) +query_repo = $(call _qr,$1,$(call query_fetch_method,$1)) +_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$1),$(call query_repo_git,$1)) query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)) -query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1))) -query_repo_git-subfolder = $(call query_repo_git,$(1)) +query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$1)) +query_repo_git-subfolder = $(call query_repo_git,$1) query_repo_git-submodule = - -query_repo_hg = $(call query_repo_default,$(1)) -query_repo_svn = $(call query_repo_default,$(1)) -query_repo_cp = $(call query_repo_default,$(1)) -query_repo_ln = $(call query_repo_default,$(1)) -query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1)) +query_repo_hg = $(call query_repo_default,$1) +query_repo_svn = $(call query_repo_default,$1) +query_repo_cp = $(call query_repo_default,$1) +query_repo_ln = $(call query_repo_default,$1) +query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$1) query_repo_fail = - -query_repo_legacy = - -query_version = $(call _qv,$(1),$(call query_fetch_method,$(1))) -_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1))) +query_version = $(call _qv,$1,$(call query_fetch_method,$1)) +_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$1),$(call query_version_default,$1)) query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) -query_version_git = $(call query_version_default,$(1)) -query_version_git-subfolder = $(call query_version_git,$(1)) +query_version_git = $(call query_version_default,$1) +query_version_git-subfolder = $(call query_version_default,$1) query_version_git-submodule = - -query_version_hg = $(call query_version_default,$(1)) +query_version_hg = $(call query_version_default,$1) query_version_svn = - query_version_cp = - query_version_ln = - query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit))) query_version_fail = - -query_version_legacy = - -query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1))) -_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-) +query_extra = $(call _qe,$1,$(call query_fetch_method,$1)) +_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$1),-) query_extra_git = - query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-) @@ -4404,18 +517,17 @@ query_extra_cp = - query_extra_ln = - query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-) query_extra_fail = - -query_extra_legacy = - -query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1))) +query_absolute_path = 
$(addprefix $(DEPS_DIR)/,$(call query_name,$1)) -# Deprecated legacy query functions. -dep_fetch = $(call query_fetch_method,$(1)) +# Deprecated legacy query function. Used by RabbitMQ and its third party plugins. +# Can be removed once RabbitMQ has been updated and enough time has passed. dep_name = $(call query_name,$(1)) -dep_repo = $(call query_repo_git,$(1)) -dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit))) -LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a))) -ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) +# Application directories. + +LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$a),$(APPS_DIR)/$a)) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call query_name,$(dep)))) # When we are calling an app directly we don't want to include it here # otherwise it'll be treated both as an apps and a top-level project. @@ -4439,7 +551,7 @@ export NO_AUTOPATCH # Verbosity. -dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))"; +dep_verbose_0 = @echo " DEP $1 ($(call query_version,$1))"; dep_verbose_2 = set -x; dep_verbose = $(dep_verbose_$(V)) @@ -4516,6 +628,9 @@ ifneq ($(ALL_DEPS_DIRS),) echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \ :; \ + elif [ "$$dep" = "$(DEPS_DIR)/hut" -a "$(HUT_PATCH)" ]; then \ + $(MAKE) -C $$dep app IS_DEP=1; \ + if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \ elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ $(MAKE) -C $$dep IS_DEP=1; \ if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \ @@ -4534,25 +649,25 @@ endif # While Makefile file could be GNUmakefile or makefile, # in practice only Makefile is needed so far. define dep_autopatch - if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + if [ -f $(DEPS_DIR)/$1/erlang.mk ]; then \ rm -rf $(DEPS_DIR)/$1/ebin/; \ - $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ - $(call dep_autopatch_erlang_mk,$(1)); \ - elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$1)); \ + $(call dep_autopatch_erlang_mk,$1); \ + elif [ -f $(DEPS_DIR)/$1/Makefile ]; then \ if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \ $(call dep_autopatch2,$1); \ - elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ - $(call dep_autopatch2,$(1)); \ - elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \ - $(call dep_autopatch2,$(1)); \ - elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \ - $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$1/Makefile` ]; then \ + $(call dep_autopatch2,$1); \ + elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$1/Makefile` ]; then \ + $(call dep_autopatch2,$1); \ + elif [ -n "`find $(DEPS_DIR)/$1/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \ + $(call dep_autopatch2,$1); \ fi \ else \ - if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ - $(call dep_autopatch_noop,$(1)); \ + if [ ! 
-d $(DEPS_DIR)/$1/src/ ]; then \ + $(call dep_autopatch_noop,$1); \ else \ - $(call dep_autopatch2,$(1)); \ + $(call dep_autopatch2,$1); \ fi \ fi endef @@ -4562,19 +677,19 @@ define dep_autopatch2 mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \ rm -f $(DEPS_DIR)/$1/ebin/$1.app; \ if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \ - $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \ + $(call erlang,$(call dep_autopatch_appsrc_script.erl,$1)); \ fi; \ - $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ - if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$1)); \ + if [ -f $(DEPS_DIR)/$1/rebar -o -f $(DEPS_DIR)/$1/rebar.config -o -f $(DEPS_DIR)/$1/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \ $(call dep_autopatch_fetch_rebar); \ - $(call dep_autopatch_rebar,$(1)); \ + $(call dep_autopatch_rebar,$1); \ else \ - $(call dep_autopatch_gen,$(1)); \ + $(call dep_autopatch_gen,$1); \ fi endef define dep_autopatch_noop - printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile + printf "noop:\n" > $(DEPS_DIR)/$1/Makefile endef # Replace "include erlang.mk" with a line that will load the parent Erlang.mk @@ -4596,7 +711,7 @@ endif define dep_autopatch_gen printf "%s\n" \ "ERLC_OPTS = +debug_info" \ - "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile + "include ../../erlang.mk" > $(DEPS_DIR)/$1/Makefile endef # We use flock/lockf when available to avoid concurrency issues. @@ -4611,27 +726,27 @@ define dep_autopatch_fetch_rebar endef define dep_autopatch_fetch_rebar2 - if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \ - git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \ - cd $(ERLANG_MK_TMP)/rebar; \ - git checkout -q $(REBAR_COMMIT); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar3 ]; then \ + git clone -q -n -- $(REBAR3_GIT) $(ERLANG_MK_TMP)/rebar3; \ + cd $(ERLANG_MK_TMP)/rebar3; \ + git checkout -q $(REBAR3_COMMIT); \ ./bootstrap; \ cd -; \ fi endef define dep_autopatch_rebar - if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ - mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + if [ -f $(DEPS_DIR)/$1/Makefile ]; then \ + mv $(DEPS_DIR)/$1/Makefile $(DEPS_DIR)/$1/Makefile.orig.mk; \ fi; \ - $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ - rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app + $(call erlang,$(call dep_autopatch_rebar.erl,$1)); \ + rm -f $(DEPS_DIR)/$1/ebin/$1.app endef define dep_autopatch_rebar.erl application:load(rebar), application:set_env(rebar, log_level, debug), - rmemo:start(), + {module, rebar3} = c:l(rebar3), Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of {ok, Conf0} -> Conf0; _ -> [] @@ -4665,7 +780,7 @@ define dep_autopatch_rebar.erl (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'" end, fun() -> - Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + Write("ERLC_OPTS = +debug_info\n"), case lists:keyfind(erl_opts, 1, Conf) of false -> ok; {_, ErlOpts} -> @@ -4688,12 +803,23 @@ define dep_autopatch_rebar.erl end, Write("\n") end(), - GetHexVsn = fun(N, NP) -> + GetHexVsn2 = fun(N, NP) -> case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of {ok, Lock} -> io:format("~p~n", [Lock]), - case lists:keyfind("1.1.0", 1, Lock) of - {_, LockPkgs} -> + LockPkgs = case lists:keyfind("1.2.0", 1, Lock) of + {_, LP} -> + LP; + _ -> + case lists:keyfind("1.1.0", 1, Lock) of + {_, LP} -> + LP; + _ -> + false + end + end, + if + is_list(LockPkgs) -> io:format("~p~n", [LockPkgs]), case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of {_, {pkg, _, Vsn}, _} -> @@ -4702,64 +828,83 @@ define dep_autopatch_rebar.erl _ -> false end; - _ -> + true -> false end; _ -> false end end, - SemVsn = fun - ("~>" ++ S0) -> - S = case S0 of - " " ++ S1 -> S1; - _ -> S0 - end, - case length([ok || $$. <- S]) of - 0 -> S ++ ".0.0"; - 1 -> S ++ ".0"; - _ -> S - end; - (S) -> S + GetHexVsn3Common = fun(N, NP, S0) -> + case GetHexVsn2(N, NP) of + false -> + S2 = case S0 of + " " ++ S1 -> S1; + _ -> S0 + end, + S = case length([ok || $$. 
<- S2]) of + 0 -> S2 ++ ".0.0"; + 1 -> S2 ++ ".0"; + _ -> S2 + end, + {N, {hex, NP, S}}; + NameSource -> + NameSource + end + end, + GetHexVsn3 = fun + (N, NP, "~>" ++ S0) -> + GetHexVsn3Common(N, NP, S0); + (N, NP, ">=" ++ S0) -> + GetHexVsn3Common(N, NP, S0); + (N, NP, S) -> {N, {hex, NP, S}} + end, + ConvertCommit = fun + ({branch, C}) -> C; + ({ref, C}) -> C; + ({tag, C}) -> C; + (C) -> C end, fun() -> File = case lists:keyfind(deps, 1, Conf) of false -> []; {_, Deps} -> [begin case case Dep of - N when is_atom(N) -> GetHexVsn(N, N); - {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}}; - {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP); - {N, S, {pkg, NP}} -> {N, {hex, NP, S}}; + N when is_atom(N) -> GetHexVsn2(N, N); + {N, S} when is_atom(N), is_list(S) -> GetHexVsn3(N, N, S); + {N, {pkg, NP}} when is_atom(N) -> GetHexVsn2(N, NP); + {N, S, {pkg, NP}} -> GetHexVsn3(N, NP, S); {N, S} when is_tuple(S) -> {N, S}; {N, _, S} -> {N, S}; {N, _, S, _} -> {N, S}; _ -> false end of false -> ok; + {Name, {git_subdir, Repo, Commit, SubDir}} -> + Write(io_lib:format("DEPS += ~s\ndep_~s = git-subfolder ~s ~s ~s~n", [Name, Name, Repo, ConvertCommit(Commit), SubDir])); {Name, Source} -> {Method, Repo, Commit} = case Source of {hex, NPV, V} -> {hex, V, NPV}; {git, R} -> {git, R, master}; - {M, R, {branch, C}} -> {M, R, C}; - {M, R, {ref, C}} -> {M, R, C}; - {M, R, {tag, C}} -> {M, R, C}; {M, R, C} -> {M, R, C} end, - Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, ConvertCommit(Commit)])) end end || Dep <- Deps] end end(), fun() -> case lists:keyfind(erl_first_files, 1, Conf) of false -> ok; - {_, Files} -> + {_, Files0} -> + Files = [begin + hd(filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)**/" ++ filename:rootname(F) ++ ".*rl")) + end || "src/" ++ F <- Files0], Names = [[" ", case lists:reverse(F) of "lre." ++ Elif -> lists:reverse(Elif); "lrx." ++ Elif -> lists:reverse(Elif); "lry." ++ Elif -> lists:reverse(Elif); Elif -> lists:reverse(Elif) - end] || "src/" ++ F <- Files], + end] || "$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ F <- Files], Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) end end(), @@ -4786,6 +931,8 @@ define dep_autopatch_rebar.erl Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); {compile, Cmd} -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {{pc, compile}, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); {Regex, compile, Cmd} -> case rebar_utils:is_arch(Regex) of true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); @@ -4876,9 +1023,11 @@ define dep_autopatch_rebar.erl [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], Output, ": $$\(foreach ext,.c .C .cc .cpp,", "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", - "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + "\t$$\(CC) -o $$\@ $$\? 
$$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(LDLIBS) $$\(EXE_LDFLAGS)", case {filename:extension(Output), $(PLATFORM)} of {[], _} -> "\n"; + {".so", darwin} -> " -shared\n"; + {".dylib", darwin} -> " -shared\n"; {_, darwin} -> "\n"; _ -> " -shared\n" end]) @@ -4948,9 +1097,12 @@ endef define dep_autopatch_appsrc_script.erl AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", AppSrcScript = AppSrc ++ ".script", - {ok, Conf0} = file:consult(AppSrc), + Conf1 = case file:consult(AppSrc) of + {ok, Conf0} -> Conf0; + {error, enoent} -> [] + end, Bindings0 = erl_eval:new_bindings(), - Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1), Conf = case file:script(AppSrcScript, Bindings) of {ok, [C]} -> C; @@ -4966,7 +1118,7 @@ define dep_autopatch_appsrc.erl case filelib:is_regular(AppSrcIn) of false -> ok; true -> - {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + {ok, [{application, $1, L0}]} = file:consult(AppSrcIn), L1 = lists:keystore(modules, 1, L0, {modules, []}), L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))}); @@ -4974,116 +1126,168 @@ define dep_autopatch_appsrc.erl _ -> L1 end, L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, - ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $1, L3}])), case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end end, halt() endef +ifeq ($(CACHE_DEPS),1) + +define dep_cache_fetch_git + mkdir -p $(CACHE_DIR)/git; \ + if test -d "$(join $(CACHE_DIR)/git/,$(call query_name,$1))"; then \ + cd $(join $(CACHE_DIR)/git/,$(call query_name,$1)); \ + if ! 
git checkout -q $(call query_version,$1); then \ + git remote set-url origin $(call query_repo_git,$1) && \ + git pull --all && \ + git cat-file -e $(call query_version_git,$1) 2>/dev/null; \ + fi; \ + else \ + git clone -q -n -- $(call query_repo_git,$1) $(join $(CACHE_DIR)/git/,$(call query_name,$1)); \ + fi; \ + git clone -q --single-branch -- $(join $(CACHE_DIR)/git/,$(call query_name,$1)) $2; \ + cd $2 && git checkout -q $(call query_version_git,$1) +endef + +define dep_fetch_git + $(call dep_cache_fetch_git,$1,$(DEPS_DIR)/$(call query_name,$1)); +endef + +define dep_fetch_git-subfolder + mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \ + $(call dep_cache_fetch_git,$1,$(ERLANG_MK_TMP)/git-subfolder/$(call query_name,$1)); \ + ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call query_name,$1)/$(word 4,$(dep_$1)) \ + $(DEPS_DIR)/$(call query_name,$1); +endef + +else + define dep_fetch_git - git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ - cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); + git clone -q -n -- $(call query_repo_git,$1) $(DEPS_DIR)/$(call query_name,$1); \ + cd $(DEPS_DIR)/$(call query_name,$1) && git checkout -q $(call query_version_git,$1); endef define dep_fetch_git-subfolder mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \ - git clone -q -n -- $(call dep_repo,$1) \ - $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \ - cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \ - && git checkout -q $(call dep_commit,$1); \ - ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \ - $(DEPS_DIR)/$(call dep_name,$1); + git clone -q -n -- $(call query_repo_git-subfolder,$1) \ + $(ERLANG_MK_TMP)/git-subfolder/$(call query_name,$1); \ + cd $(ERLANG_MK_TMP)/git-subfolder/$(call query_name,$1) \ + && git checkout -q $(call query_version_git-subfolder,$1); \ + ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call query_name,$1)/$(word 4,$(dep_$1)) \ + $(DEPS_DIR)/$(call query_name,$1); endef +endif + define dep_fetch_git-submodule git submodule update --init -- $(DEPS_DIR)/$1; endef define dep_fetch_hg - hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ - cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); + hg clone -q -U $(call query_repo_hg,$1) $(DEPS_DIR)/$(call query_name,$1); \ + cd $(DEPS_DIR)/$(call query_name,$1) && hg update -q $(call query_version_hg,$1); endef define dep_fetch_svn - svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); + svn checkout -q $(call query_repo_svn,$1) $(DEPS_DIR)/$(call query_name,$1); endef define dep_fetch_cp - cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); + cp -R $(call query_repo_cp,$1) $(DEPS_DIR)/$(call query_name,$1); endef define dep_fetch_ln - ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); + ln -s $(call query_repo_ln,$1) $(DEPS_DIR)/$(call query_name,$1); +endef + +define hex_get_tarball.erl + {ok, _} = application:ensure_all_started(ssl), + {ok, _} = application:ensure_all_started(inets), + Config = $(hex_config.erl), + case hex_repo:get_tarball(Config, <<"$1">>, <<"$(strip $2)">>) of + {ok, {200, _, Tarball}} -> + ok = file:write_file("$3", Tarball), + halt(0); + {ok, {Status, _, Errors}} -> + io:format("Error ~b: ~0p~n", [Status, Errors]), + halt(79) + end endef +ifeq ($(CACHE_DEPS),1) + +# Hex only has a package version. No need to look in the Erlang.mk packages. 
+define dep_fetch_hex + mkdir -p $(CACHE_DIR)/hex $(DEPS_DIR)/$1; \ + $(eval hex_pkg_name := $(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)) \ + $(eval hex_tar_name := $(hex_pkg_name)-$(strip $(word 2,$(dep_$1))).tar) \ + $(if $(wildcard $(CACHE_DIR)/hex/$(hex_tar_name)),,\ + $(call erlang,$(call hex_get_tarball.erl,$(hex_pkg_name),$(word 2,$(dep_$1)),$(CACHE_DIR)/hex/$(hex_tar_name)));) \ + tar -xOf $(CACHE_DIR)/hex/$(hex_tar_name) contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -; +endef + +else + # Hex only has a package version. No need to look in the Erlang.mk packages. define dep_fetch_hex mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \ - $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\ - https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \ + $(call erlang,$(call hex_get_tarball.erl,$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1),$(word 2,$(dep_$1)),$(ERLANG_MK_TMP)/hex/$1.tar)); \ tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -; endef +endif + define dep_fetch_fail - echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + echo "Error: Unknown or invalid dependency: $1." >&2; \ exit 78; endef -# Kept for compatibility purposes with older Erlang.mk configuration. -define dep_fetch_legacy - $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \ - git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ - cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); -endef - define dep_target -$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP) - $(eval DEP_NAME := $(call dep_name,$1)) +$(DEPS_DIR)/$(call query_name,$1): | $(if $(filter hex,$(call query_fetch_method,$1)),hex-core) $(ERLANG_MK_TMP) + $(eval DEP_NAME := $(call query_name,$1)) $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \ exit 17; \ fi $(verbose) mkdir -p $(DEPS_DIR) - $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1)) - $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \ - && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \ + $(dep_verbose) $(call dep_fetch_$(strip $(call query_fetch_method,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$1/configure.ac -o -f $(DEPS_DIR)/$1/configure.in ] \ + && [ ! -f $(DEPS_DIR)/$1/configure ]; then \ echo " AUTO " $(DEP_STR); \ - cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \ + cd $(DEPS_DIR)/$1 && autoreconf -Wall -vif -I m4; \ fi - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ echo " CONF " $(DEP_STR); \ cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ fi -ifeq ($(filter $(1),$(NO_AUTOPATCH)),) +ifeq ($(filter $1,$(NO_AUTOPATCH)),) $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME) endif -.PHONY: autopatch-$(call dep_name,$1) +.PHONY: autopatch-$(call query_name,$1) -autopatch-$(call dep_name,$1):: - $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ - if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ - echo " PATCH Downloading rabbitmq-codegen"; \ - git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ - fi; \ - if [ ! 
-d $(DEPS_DIR)/rabbitmq-server ]; then \ - echo " PATCH Downloading rabbitmq-server"; \ - git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ - fi; \ - ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ - elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ - if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ - echo " PATCH Downloading rabbitmq-codegen"; \ - git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ - fi \ - elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \ +autopatch-$(call query_name,$1):: + $(verbose) if [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \ ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \ else \ - $$(call dep_autopatch,$(call dep_name,$1)) \ + $$(call dep_autopatch,$(call query_name,$1)) \ fi endef +# We automatically depend on hex_core when the project isn't already. +$(if $(filter hex_core,$(DEPS) $(BUILD_DEPS) $(DOC_DEPS) $(REL_DEPS) $(TEST_DEPS)),,\ + $(eval $(call dep_target,hex_core))) + +.PHONY: hex-core + +hex-core: $(DEPS_DIR)/hex_core + $(verbose) if [ ! -e $(DEPS_DIR)/hex_core/ebin/dep_built ]; then \ + $(MAKE) -C $(DEPS_DIR)/hex_core IS_DEP=1; \ + touch $(DEPS_DIR)/hex_core/ebin/dep_built; \ + fi + $(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) ifndef IS_APP @@ -5109,6 +1313,16 @@ distclean-deps: $(gen_verbose) rm -rf $(DEPS_DIR) endif +ifeq ($(CACHE_DEPS),1) +cacheclean:: cacheclean-git cacheclean-hex + +cacheclean-git: + $(gen_verbose) rm -rf $(CACHE_DIR)/git + +cacheclean-hex: + $(gen_verbose) rm -rf $(CACHE_DIR)/hex +endif + # Forward-declare variables used in core/deps-tools.mk. This is required # in case plugins use them. @@ -5124,6 +1338,49 @@ ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log +# Copyright (c) 2024, Loïc Hoguin <[email protected]> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: beam-cache-restore-app beam-cache-restore-test clean-beam-cache distclean-beam-cache + +BEAM_CACHE_DIR ?= $(ERLANG_MK_TMP)/beam-cache +PROJECT_BEAM_CACHE_DIR = $(BEAM_CACHE_DIR)/$(PROJECT) + +clean:: clean-beam-cache + +clean-beam-cache: + $(verbose) rm -rf $(PROJECT_BEAM_CACHE_DIR) + +distclean:: distclean-beam-cache + +$(PROJECT_BEAM_CACHE_DIR): + $(verbose) mkdir -p $(PROJECT_BEAM_CACHE_DIR) + +distclean-beam-cache: + $(gen_verbose) rm -rf $(BEAM_CACHE_DIR) + +beam-cache-restore-app: | $(PROJECT_BEAM_CACHE_DIR) + $(verbose) rm -rf $(PROJECT_BEAM_CACHE_DIR)/ebin-test +ifneq ($(wildcard ebin/),) + $(verbose) mv ebin/ $(PROJECT_BEAM_CACHE_DIR)/ebin-test +endif +ifneq ($(wildcard $(PROJECT_BEAM_CACHE_DIR)/ebin-app),) + $(gen_verbose) mv $(PROJECT_BEAM_CACHE_DIR)/ebin-app ebin/ +else + $(verbose) $(MAKE) --no-print-directory clean-app +endif + +beam-cache-restore-test: | $(PROJECT_BEAM_CACHE_DIR) + $(verbose) rm -rf $(PROJECT_BEAM_CACHE_DIR)/ebin-app +ifneq ($(wildcard ebin/),) + $(verbose) mv ebin/ $(PROJECT_BEAM_CACHE_DIR)/ebin-app +endif +ifneq ($(wildcard $(PROJECT_BEAM_CACHE_DIR)/ebin-test),) + $(gen_verbose) mv $(PROJECT_BEAM_CACHE_DIR)/ebin-test ebin/ +else + $(verbose) $(MAKE) --no-print-directory clean-app +endif + # Copyright (c) 2013-2016, Loïc Hoguin <[email protected]> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -5179,7 +1436,7 @@ ifneq ($(wildcard src/),) # Targets. 
-app:: $(if $(wildcard ebin/test),clean) deps +app:: $(if $(wildcard ebin/test),beam-cache-restore-app) deps $(verbose) $(MAKE) --no-print-directory $(PROJECT).d $(verbose) $(MAKE) --no-print-directory app-build @@ -5188,10 +1445,11 @@ define app_file {application, '$(PROJECT)', [ {description, "$(PROJECT_DESCRIPTION)"}, {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), - {id$(comma)$(space)"$(1)"}$(comma)) - {modules, [$(call comma_list,$(2))]}, + {id$(comma)$(space)"$1"}$(comma)) + {modules, [$(call comma_list,$2)]}, {registered, []}, - {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(OPTIONAL_DEPS) $(foreach dep,$(DEPS),$(call query_name,$(dep))))]}, + {optional_applications, [$(call comma_list,$(OPTIONAL_DEPS))]}, {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),) ]}. endef @@ -5200,10 +1458,11 @@ define app_file {application, '$(PROJECT)', [ {description, "$(PROJECT_DESCRIPTION)"}, {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), - {id$(comma)$(space)"$(1)"}$(comma)) - {modules, [$(call comma_list,$(2))]}, + {id$(comma)$(space)"$1"}$(comma)) + {modules, [$(call comma_list,$2)]}, {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, - {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(OPTIONAL_DEPS) $(foreach dep,$(DEPS),$(call query_name,$(dep))))]}, + {optional_applications, [$(call comma_list,$(OPTIONAL_DEPS))]}, {mod, {$(PROJECT_MOD), []}}, {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),) ]}. 
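The OPTIONAL_DEPS handling above changes what ends up in the generated .app file. A minimal sketch, assuming a hypothetical project with one regular and one optional dependency (the names below are examples only):

    # Project Makefile
    DEPS = mylib
    OPTIONAL_DEPS = myopt

    # Resulting keys in the generated ebin/*.app, per the template above:
    #   {applications, [kernel, stdlib, myopt, mylib]},
    #   {optional_applications, [myopt]},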
@@ -5228,7 +1487,7 @@ ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) define compile_asn1 $(verbose) mkdir -p include/ - $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1) + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $1 $(verbose) mv asn1/*.erl src/ -$(verbose) mv asn1/*.hrl include/ $(verbose) mv asn1/*.asn1db include/ @@ -5348,7 +1607,6 @@ define makedep.erl end, MakeDepend = fun (F, Fd, Mod, StartLocation) -> - {ok, Filename} = file:pid2name(Fd), case io:parse_erl_form(Fd, undefined, StartLocation) of {ok, AbsData, EndLocation} -> case AbsData of @@ -5391,7 +1649,7 @@ define makedep.erl [233] -> unicode:characters_to_binary(Output0); _ -> Output0 end, - ok = file:write_file("$(1)", Output), + ok = file:write_file("$1", Output), halt() endef @@ -5427,7 +1685,7 @@ ebin/: define compile_erl $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ - -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $1) endef define validate_app_file @@ -5561,18 +1819,21 @@ test_erlc_verbose = $(test_erlc_verbose_$(V)) define compile_test_erl $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \ - -pa ebin/ -I include/ $(1) + -pa ebin/ -I include/ $1 endef ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl) + $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST) - $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?)) +# When we have to recompile files in src/ the .d file always gets rebuilt. +# Therefore we want to ignore it when rebuilding test files. + $(eval FILES_TO_COMPILE := $(if $(filter $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)),$?),$(filter $(ERL_TEST_FILES),$^),$(filter $(ERL_TEST_FILES),$?))) $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@) endif test-build:: IS_TEST=1 test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) -test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps) +test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,beam-cache-restore-test)) $(if $(IS_APP),,deps test-deps) # We already compiled everything when IS_APP=1. ifndef IS_APP ifneq ($(wildcard src),) @@ -5612,6 +1873,8 @@ endif .PHONY: rebar.config +compat_ref = {$(shell (git -C $(DEPS_DIR)/$1 show-ref -q --verify "refs/heads/$2" && echo branch) || (git -C $(DEPS_DIR)/$1 show-ref -q --verify "refs/tags/$2" && echo tag) || echo ref),"$2"} + # We strip out -Werror because we don't want to fail due to # warnings when used as a dependency. @@ -5630,122 +1893,18 @@ endef define compat_rebar_config {deps, [ $(call comma_list,$(foreach d,$(DEPS),\ - $(if $(filter hex,$(call dep_fetch,$d)),\ - {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\ - {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}}))) + $(if $(filter hex,$(call query_fetch_method,$d)),\ + {$(call query_name,$d)$(comma)"$(call query_version_hex,$d)"},\ + {$(call query_name,$d)$(comma)".*"$(comma){git,"$(call query_repo,$d)"$(comma)$(call compat_ref,$(call query_name,$d),$(call query_version,$d))}}))) ]}. {erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}. 
endef -rebar.config: +rebar.config: deps $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config) -# Copyright (c) 2015-2016, Loïc Hoguin <[email protected]> -# This file is part of erlang.mk and subject to the terms of the ISC License. - -ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck) - -.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual - -# Core targets. - -docs:: asciidoc - -distclean:: distclean-asciidoc-guide distclean-asciidoc-manual - -# Plugin-specific targets. - -asciidoc: asciidoc-guide asciidoc-manual - -# User guide. - -ifeq ($(wildcard doc/src/guide/book.asciidoc),) -asciidoc-guide: -else -asciidoc-guide: distclean-asciidoc-guide doc-deps - a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf - a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ - -distclean-asciidoc-guide: - $(gen_verbose) rm -rf doc/html/ doc/guide.pdf -endif - -# Man pages. - -ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc) - -ifeq ($(ASCIIDOC_MANUAL_FILES),) -asciidoc-manual: -else - -# Configuration. - -MAN_INSTALL_PATH ?= /usr/local/share/man -MAN_SECTIONS ?= 3 7 -MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/') -MAN_VERSION ?= $(PROJECT_VERSION) - -# Plugin-specific targets. - -define asciidoc2man.erl -try - [begin - io:format(" ADOC ~s~n", [F]), - ok = asciideck:to_manpage(asciideck:parse_file(F), #{ - compress => gzip, - outdir => filename:dirname(F), - extra2 => "$(MAN_PROJECT) $(MAN_VERSION)", - extra3 => "$(MAN_PROJECT) Function Reference" - }) - end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]], - halt(0) -catch C:E -> - io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]), - halt(1) -end. -endef - -asciidoc-manual:: doc-deps - -asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES) - $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?)) - $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;) - -install-docs:: install-asciidoc - -install-asciidoc: asciidoc-manual - $(foreach s,$(MAN_SECTIONS),\ - mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \ - install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;) - -distclean-asciidoc-manual: - $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS)) -endif -endif - -# Copyright (c) 2014-2016, Loïc Hoguin <[email protected]> -# This file is part of erlang.mk and subject to the terms of the ISC License. - -.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates - -# Core targets. - -help:: - $(verbose) printf "%s\n" "" \ - "Bootstrap targets:" \ - " bootstrap Generate a skeleton of an OTP application" \ - " bootstrap-lib Generate a skeleton of an OTP library" \ - " bootstrap-rel Generate the files needed to build a release" \ - " new-app in=NAME Create a new local OTP application NAME" \ - " new-lib in=NAME Create a new local OTP library NAME" \ - " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ - " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ - " list-templates List available templates" - -# Bootstrap templates. - -define bs_appsrc -{application, $p, [ +define tpl_application.app.src +{application, project_name, [ {description, ""}, {vsn, "0.1.0"}, {id, "git"}, @@ -5755,176 +1914,134 @@ define bs_appsrc kernel, stdlib ]}, - {mod, {$p_app, []}}, + {mod, {project_name_app, []}}, {env, []} ]}. 
endef -define bs_appsrc_lib -{application, $p, [ - {description, ""}, - {vsn, "0.1.0"}, - {id, "git"}, - {modules, []}, - {registered, []}, - {applications, [ - kernel, - stdlib - ]} -]}. -endef - -# To prevent autocompletion issues with ZSH, we add "include erlang.mk" -# separately during the actual bootstrap. -define bs_Makefile -PROJECT = $p -PROJECT_DESCRIPTION = New project -PROJECT_VERSION = 0.1.0 -$(if $(SP), -# Whitespace to be used when creating files from templates. -SP = $(SP) -) -endef - -define bs_apps_Makefile -PROJECT = $p -PROJECT_DESCRIPTION = New project -PROJECT_VERSION = 0.1.0 -$(if $(SP), -# Whitespace to be used when creating files from templates. -SP = $(SP) -) -# Make sure we know where the applications are located. -ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app) -APPS_DIR ?= .. -DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app) - -include $$(ROOT_DIR)/erlang.mk -endef - -define bs_app --module($p_app). +define tpl_application +-module(project_name_app). -behaviour(application). -export([start/2]). -export([stop/1]). start(_Type, _Args) -> - $p_sup:start_link(). + project_name_sup:start_link(). stop(_State) -> ok. endef -define bs_relx_config -{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}. -{extended_start_script, true}. -{sys_config, "config/sys.config"}. -{vm_args, "config/vm.args"}. -endef +define tpl_apps_Makefile +PROJECT = project_name +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.1.0 +template_sp +# Make sure we know where the applications are located. +ROOT_DIR ?= rel_root_dir +APPS_DIR ?= .. +DEPS_DIR ?= rel_deps_dir -define bs_sys_config -[ -]. +include rel_root_dir/erlang.mk endef -define bs_vm_args --name [email protected] --setcookie $p --heart -endef +define tpl_cowboy_http_h +-module(template_name). +-behaviour(cowboy_http_handler). -# Normal templates. +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). -define tpl_supervisor --module($(n)). --behaviour(supervisor). +-record(state, { +}). --export([start_link/0]). --export([init/1]). +init(_, Req, _Opts) -> + {ok, Req, #state{}}. -start_link() -> - supervisor:start_link({local, ?MODULE}, ?MODULE, []). +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. -init([]) -> - Procs = [], - {ok, {{one_for_one, 1, 5}, Procs}}. +terminate(_Reason, _Req, _State) -> + ok. endef -define tpl_gen_server --module($(n)). --behaviour(gen_server). - -%% API. --export([start_link/0]). +define tpl_cowboy_loop_h +-module(template_name). +-behaviour(cowboy_loop_handler). -%% gen_server. --export([init/1]). --export([handle_call/3]). --export([handle_cast/2]). --export([handle_info/2]). --export([terminate/2]). --export([code_change/3]). +-export([init/3]). +-export([info/3]). +-export([terminate/3]). -record(state, { }). -%% API. - --spec start_link() -> {ok, pid()}. -start_link() -> - gen_server:start_link(?MODULE, [], []). - -%% gen_server. +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. -init([]) -> - {ok, #state{}}. +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. -handle_call(_Request, _From, State) -> - {reply, ignored, State}. +terminate(_Reason, _Req, _State) -> + ok. +endef -handle_cast(_Msg, State) -> - {noreply, State}. +define tpl_cowboy_rest_h +-module(template_name). -handle_info(_Info, State) -> - {noreply, State}. +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). -terminate(_Reason, _State) -> - ok. 
+init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. -code_change(_OldVsn, State, _Extra) -> - {ok, State}. -endef +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. -define tpl_module --module($(n)). --export([]). +get_html(Req, State) -> + {<<"<html><body>This is REST!</body></html>">>, Req, State}. endef -define tpl_cowboy_http --module($(n)). --behaviour(cowboy_http_handler). +define tpl_cowboy_websocket_h +-module(template_name). +-behaviour(cowboy_websocket_handler). -export([init/3]). --export([handle/2]). --export([terminate/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). -record(state, { }). -init(_, Req, _Opts) -> - {ok, Req, #state{}}. +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. -handle(Req, State=#state{}) -> - {ok, Req2} = cowboy_req:reply(200, Req), - {ok, Req2, State}. +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. -terminate(_Reason, _Req, _State) -> +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> ok. endef define tpl_gen_fsm --module($(n)). +-module(template_name). -behaviour(gen_fsm). %% API. @@ -5976,8 +2093,53 @@ code_change(_OldVsn, StateName, StateData, _Extra) -> {ok, StateName, StateData}. endef +define tpl_gen_server +-module(template_name). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + define tpl_gen_statem --module($(n)). +-module(template_name). -behaviour(gen_statem). %% API. @@ -6021,80 +2183,27 @@ code_change(_OldVsn, StateName, StateData, _Extra) -> {ok, StateName, StateData}. endef -define tpl_cowboy_loop --module($(n)). --behaviour(cowboy_loop_handler). - --export([init/3]). --export([info/3]). --export([terminate/3]). - --record(state, { -}). - -init(_, Req, _Opts) -> - {loop, Req, #state{}, 5000, hibernate}. - -info(_Info, Req, State) -> - {loop, Req, State, hibernate}. - -terminate(_Reason, _Req, _State) -> - ok. -endef - -define tpl_cowboy_rest --module($(n)). - --export([init/3]). --export([content_types_provided/2]). --export([get_html/2]). - -init(_, _Req, _Opts) -> - {upgrade, protocol, cowboy_rest}. - -content_types_provided(Req, State) -> - {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. - -get_html(Req, State) -> - {<<"<html><body>This is REST!</body></html>">>, Req, State}. +define tpl_library.app.src +{application, project_name, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. endef -define tpl_cowboy_ws --module($(n)). 
--behaviour(cowboy_websocket_handler). - --export([init/3]). --export([websocket_init/3]). --export([websocket_handle/3]). --export([websocket_info/3]). --export([websocket_terminate/3]). - --record(state, { -}). - -init(_, _, _) -> - {upgrade, protocol, cowboy_websocket}. - -websocket_init(_, Req, _Opts) -> - Req2 = cowboy_req:compact(Req), - {ok, Req2, #state{}}. - -websocket_handle({text, Data}, Req, State) -> - {reply, {text, Data}, Req, State}; -websocket_handle({binary, Data}, Req, State) -> - {reply, {binary, Data}, Req, State}; -websocket_handle(_Frame, Req, State) -> - {ok, Req, State}. - -websocket_info(_Info, Req, State) -> - {ok, Req, State}. - -websocket_terminate(_Reason, _Req, _State) -> - ok. +define tpl_module +-module(template_name). +-export([]). endef define tpl_ranch_protocol --module($(n)). +-module(template_name). -behaviour(ranch_protocol). -export([start_link/4]). @@ -6121,6 +2230,152 @@ loop(State) -> loop(State). endef +define tpl_relx.config +{release, {project_name_release, "1"}, [project_name, sasl, runtime_tools]}. +{dev_mode, false}. +{include_erts, true}. +{extended_start_script, true}. +{sys_config, "config/sys.config"}. +{vm_args, "config/vm.args"}. +endef + +define tpl_supervisor +-module(template_name). +-behaviour(supervisor). + +-export([start_link/0]). +-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_sys.config +[ +]. +endef + +define tpl_top_Makefile +PROJECT = project_name +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.1.0 +template_sp +include erlang.mk +endef + +define tpl_vm.args +-name [email protected] +-setcookie project_name +-heart +endef + + +# Copyright (c) 2015-2016, Loïc Hoguin <[email protected]> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck) + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual + +# Core targets. + +docs:: asciidoc + +distclean:: distclean-asciidoc-guide distclean-asciidoc-manual + +# Plugin-specific targets. + +asciidoc: asciidoc-guide asciidoc-manual + +# User guide. + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: distclean-asciidoc-guide doc-deps + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ + +distclean-asciidoc-guide: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf +endif + +# Man pages. + +ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc) + +ifeq ($(ASCIIDOC_MANUAL_FILES),) +asciidoc-manual: +else + +# Configuration. + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 +MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/') +MAN_VERSION ?= $(PROJECT_VERSION) + +# Plugin-specific targets. + +define asciidoc2man.erl +try + [begin + io:format(" ADOC ~s~n", [F]), + ok = asciideck:to_manpage(asciideck:parse_file(F), #{ + compress => gzip, + outdir => filename:dirname(F), + extra2 => "$(MAN_PROJECT) $(MAN_VERSION)", + extra3 => "$(MAN_PROJECT) Function Reference" + }) + end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]], + halt(0) +catch C:E$(if $V,:S) -> + io:format("Exception: ~p:~p~n$(if $V,Stacktrace: ~p~n)", [C, E$(if $V,$(comma) S)]), + halt(1) +end. 
+endef + +asciidoc-manual:: doc-deps + +asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES) + $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?)) + $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;) + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + $(foreach s,$(MAN_SECTIONS),\ + mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \ + install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;) + +distclean-asciidoc-manual: + $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS)) +endif +endif + +# Copyright (c) 2014-2016, Loïc Hoguin <[email protected]> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app in=NAME Create a new local OTP application NAME" \ + " new-lib in=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + # Plugin-specific targets. ifndef WS @@ -6131,6 +2386,26 @@ WS = $(tab) endif endif +ifdef SP +define template_sp + +# By default templates indent with a single tab per indentation +# level. Set this variable to the number of spaces you prefer: +SP = $(SP) + +endef +else +template_sp = +endif + +# @todo Additional template placeholders could be added. +subst_template = $(subst rel_root_dir,$(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app),$(subst rel_deps_dir,$(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app),$(subst template_sp,$(template_sp),$(subst project_name,$p,$(subst template_name,$n,$1))))) + +define core_render_template + $(eval define _tpl_$(1)$(newline)$(call subst_template,$(tpl_$(1)))$(newline)endef) + $(verbose) $(call core_render,_tpl_$(1),$2) +endef + bootstrap: ifneq ($(wildcard src/),) $(error Error: src/ directory already exists) @@ -6139,14 +2414,13 @@ endif $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ $(error Error: Invalid characters in the application name)) $(eval n := $(PROJECT)_sup) - $(verbose) $(call core_render,bs_Makefile,Makefile) - $(verbose) echo "include erlang.mk" >> Makefile + $(verbose) $(call core_render_template,top_Makefile,Makefile) $(verbose) mkdir src/ ifdef LEGACY - $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src) + $(verbose) $(call core_render_template,application.app.src,src/$(PROJECT).app.src) endif - $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl) - $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl) + $(verbose) $(call core_render_template,application,src/$(PROJECT)_app.erl) + $(verbose) $(call core_render_template,supervisor,src/$(PROJECT)_sup.erl) bootstrap-lib: ifneq ($(wildcard src/),) @@ -6155,11 +2429,10 @@ endif $(eval p := $(PROJECT)) $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ $(error Error: Invalid characters in the application name)) - $(verbose) $(call core_render,bs_Makefile,Makefile) - $(verbose) echo "include erlang.mk" >> Makefile + $(verbose) $(call core_render_template,top_Makefile,Makefile) $(verbose) mkdir src/ ifdef LEGACY - $(verbose) $(call 
core_render,bs_appsrc_lib,src/$(PROJECT).app.src) + $(verbose) $(call core_render_template,library.app.src,src/$(PROJECT).app.src) endif bootstrap-rel: @@ -6170,10 +2443,12 @@ ifneq ($(wildcard config/),) $(error Error: config/ directory already exists) endif $(eval p := $(PROJECT)) - $(verbose) $(call core_render,bs_relx_config,relx.config) + $(verbose) $(call core_render_template,relx.config,relx.config) $(verbose) mkdir config/ - $(verbose) $(call core_render,bs_sys_config,config/sys.config) - $(verbose) $(call core_render,bs_vm_args,config/vm.args) + $(verbose) $(call core_render_template,sys.config,config/sys.config) + $(verbose) $(call core_render_template,vm.args,config/vm.args) + $(verbose) awk '/^include erlang.mk/ && !ins {print "REL_DEPS += relx";ins=1};{print}' Makefile > Makefile.bak + $(verbose) mv Makefile.bak Makefile new-app: ifndef in @@ -6187,12 +2462,12 @@ endif $(error Error: Invalid characters in the application name)) $(eval n := $(in)_sup) $(verbose) mkdir -p $(APPS_DIR)/$p/src/ - $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) + $(verbose) $(call core_render_template,apps_Makefile,$(APPS_DIR)/$p/Makefile) ifdef LEGACY - $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) + $(verbose) $(call core_render_template,application.app.src,$(APPS_DIR)/$p/src/$p.app.src) endif - $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) - $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + $(verbose) $(call core_render_template,application,$(APPS_DIR)/$p/src/$p_app.erl) + $(verbose) $(call core_render_template,supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) new-lib: ifndef in @@ -6205,30 +2480,40 @@ endif $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ $(error Error: Invalid characters in the application name)) $(verbose) mkdir -p $(APPS_DIR)/$p/src/ - $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) + $(verbose) $(call core_render_template,apps_Makefile,$(APPS_DIR)/$p/Makefile) ifdef LEGACY - $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) + $(verbose) $(call core_render_template,library.app.src,$(APPS_DIR)/$p/src/$p.app.src) endif +# These are not necessary because we don't expose those as "normal" templates. +BOOTSTRAP_TEMPLATES = apps_Makefile top_Makefile \ + application.app.src library.app.src application \ + relx.config sys.config vm.args + +# Templates may override the path they will be written to when using 'new'. +# Only special template paths must be listed. Default is src/template_name.erl +# Substitution is also applied to the paths. Examples: +# +#tplp_top_Makefile = Makefile +#tplp_application.app.src = src/project_name.app.src +#tplp_application = src/project_name_app.erl +#tplp_relx.config = relx.config + +# Erlang.mk bundles its own templates at build time into the erlang.mk file. 
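# A brief sketch of how a custom template could plug into the machinery
# above; the template name 'my_worker' and its contents are examples only,
# defined in the project Makefile:
#
#   define tpl_my_worker
#   -module(template_name).
#   -export([start_link/0]).
#
#   start_link() ->
#       {ok, spawn_link(fun() -> ok end)}.
#   endef
#
#   # Optional: write it somewhere other than the default src/template_name.erl.
#   #tplp_my_worker = src/workers/template_name.erl
#
# Rendering it would then be, for instance:
#
#   $ make new t=my_worker n=my_first_worker
#
# which substitutes template_name (and project_name, if used) before
# writing the file to src/my_first_worker.erl or to the tplp_ path.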
+ new: -ifeq ($(wildcard src/)$(in),) - $(error Error: src/ directory does not exist) -endif -ifndef t - $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) -endif -ifndef n - $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) -endif -ifdef in - $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl) -else - $(verbose) $(call core_render,tpl_$(t),src/$(n).erl) -endif + $(if $(t),,$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])) + $(if $(n),,$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])) + $(if $(tpl_$(t)),,$(error Error: $t template does not exist; try $(Make) list-templates)) + $(eval dest := $(if $(in),$(APPS_DIR)/$(in)/)$(call subst_template,$(if $(tplp_$(t)),$(tplp_$(t)),src/template_name.erl))) + $(if $(wildcard $(dir $(dest))),,$(error Error: $(dir $(dest)) directory does not exist)) + $(if $(wildcard $(dest)),$(error Error: The file $(dest) already exists)) + $(eval p := $(PROJECT)) + $(call core_render_template,$(t),$(dest)) list-templates: $(verbose) @echo Available templates: - $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + $(verbose) printf " %s\n" $(sort $(filter-out $(BOOTSTRAP_TEMPLATES),$(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))) # Copyright (c) 2014-2016, Loïc Hoguin <[email protected]> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -6247,17 +2532,24 @@ C_SRC_TYPE ?= shared ifeq ($(PLATFORM),msys2) C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll + C_SRC_OUTPUT_STATIC_EXTENSION ?= .lib else C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= C_SRC_OUTPUT_SHARED_EXTENSION ?= .so + C_SRC_OUTPUT_STATIC_EXTENSION ?= .a endif ifeq ($(C_SRC_TYPE),shared) C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION) +else ifeq ($(C_SRC_TYPE),static) + C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_STATIC_EXTENSION) else C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION) endif +RANLIB ?= ranlib +ARFLAGS ?= cr + ifeq ($(PLATFORM),msys2) # We hardcode the compiler used on MSYS2. The default CC=cc does # not produce working code. The "gcc" MSYS2 package also doesn't. @@ -6267,9 +2559,9 @@ ifeq ($(PLATFORM),msys2) CXXFLAGS ?= -O3 -finline-functions -Wall else ifeq ($(PLATFORM),darwin) CC ?= cc - CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes - CXXFLAGS ?= -O3 -arch x86_64 -Wall - LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress + CFLAGS ?= -O3 -std=c99 -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -Wall + LDFLAGS ?= -flat_namespace -undefined suppress else ifeq ($(PLATFORM),freebsd) CC ?= cc CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes @@ -6285,6 +2577,11 @@ ifneq ($(PLATFORM),msys2) CXXFLAGS += -fPIC endif +ifeq ($(C_SRC_TYPE),static) + CFLAGS += -DSTATIC_ERLANG_NIF=1 + CXXFLAGS += -DSTATIC_ERLANG_NIF=1 +endif + CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)" CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)" @@ -6301,6 +2598,12 @@ cpp_verbose = $(cpp_verbose_$(V)) link_verbose_0 = @echo " LD " $(@F); link_verbose = $(link_verbose_$(V)) +ar_verbose_0 = @echo " AR " $(@F); +ar_verbose = $(ar_verbose_$(V)) + +ranlib_verbose_0 = @echo " RANLIB" $(@F); +ranlib_verbose = $(ranlib_verbose_$(V)) + # Targets. 
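# A minimal sketch of opting into the static build support above, assuming
# a project with NIF sources in c_src/ (the value is illustrative):
#
#   C_SRC_TYPE = static
#
# With this, the objects are archived with $(AR)/$(RANLIB) into a .a
# (.lib on Windows) instead of being linked into a shared object, and the
# sources are compiled with -DSTATIC_ERLANG_NIF=1 so the NIF code can
# adapt accordingly.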
ifeq ($(wildcard $(C_SRC_DIR)),) @@ -6329,11 +2632,19 @@ app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE) test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE) +ifneq ($(C_SRC_TYPE),static) $(C_SRC_OUTPUT_FILE): $(OBJECTS) $(verbose) mkdir -p $(dir $@) $(link_verbose) $(CC) $(OBJECTS) \ $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ -o $(C_SRC_OUTPUT_FILE) +else +$(C_SRC_OUTPUT_FILE): $(OBJECTS) + $(verbose) mkdir -p $(dir $@) + $(ar_verbose) $(AR) $(ARFLAGS) $(C_SRC_OUTPUT_FILE) $(OBJECTS) + $(ranlib_verbose) $(RANLIB) $(C_SRC_OUTPUT_FILE) +endif + $(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV) @@ -6484,24 +2795,14 @@ endif .PHONY: ci ci-prepare ci-setup CI_OTP ?= -CI_HIPE ?= -CI_ERLLVM ?= - -ifeq ($(CI_VM),native) -ERLC_OPTS += +native -TEST_ERLC_OPTS += +native -else ifeq ($(CI_VM),erllvm) -ERLC_OPTS += +native +'{hipe, [to_llvm]}' -TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}' -endif -ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),) +ifeq ($(strip $(CI_OTP)),) ci:: else -ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM))) +ci:: $(addprefix ci-,$(CI_OTP)) -ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE))) +ci-prepare: $(addprefix ci-prepare-,$(CI_OTP)) ci-setup:: $(verbose) : @@ -6509,11 +2810,14 @@ ci-setup:: ci-extra:: $(verbose) : -ci_verbose_0 = @echo " CI " $(1); +ci_verbose_0 = @echo " CI " $1; ci_verbose = $(ci_verbose_$(V)) define ci_target -ci-$1: $(KERL_INSTALL_DIR)/$2 +ci-prepare-$1: $(KERL_INSTALL_DIR)/$2 + $(verbose) : + +ci-$1: ci-prepare-$1 $(verbose) $(MAKE) --no-print-directory clean $(ci_verbose) \ PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \ @@ -6525,11 +2829,8 @@ ci-$1: $(KERL_INSTALL_DIR)/$2 endef $(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp))) -$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native))) -$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm))) $(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp)))) -$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLLVM))),$(eval $(call kerl_hipe_target,$(otp)))) help:: $(verbose) printf "%s\n" "" \ @@ -6689,9 +2990,9 @@ endif endif define ct_suite_target -ct-$(1): test-build - $(verbose) mkdir -p $(CT_LOGS_DIR) - $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS) +ct-$1: test-build + $$(verbose) mkdir -p $$(CT_LOGS_DIR) + $$(gen_verbose_esc) $$(CT_RUN) -sname ct_$$(PROJECT) -suite $$(addsuffix _SUITE,$1) $$(CT_EXTRA) $$(CT_OPTS) endef $(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) @@ -6711,7 +3012,7 @@ export DIALYZER_PLT PLT_APPS ?= DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS) -DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs +DIALYZER_OPTS ?= -Werror_handling -Wunmatched_returns # -Wunderspecs DIALYZER_PLT_OPTS ?= # Core targets. 
@@ -6770,7 +3071,7 @@ dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app) else dialyze: $(DIALYZER_PLT) endif - $(verbose) dialyzer --no_native `$(ERL) \ + $(verbose) dialyzer `$(ERL) \ -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \ -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/) @@ -6787,7 +3088,11 @@ EDOC_OUTPUT ?= doc define edoc.erl SrcPaths = lists:foldl(fun(P, Acc) -> - filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc + filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") + ++ lists:filter(fun(D) -> + filelib:is_dir(D) + end, filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}/**")) + ++ Acc end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]), DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}], edoc:application($(1), ".", [$(2)] ++ DefaultOpts), @@ -6904,11 +3209,11 @@ help:: escript-zip:: FULL=1 escript-zip:: deps app - $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP)) - $(verbose) rm -f $(ESCRIPT_ZIP_FILE) - $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/* + $(verbose) mkdir -p $(dir $(abspath $(ESCRIPT_ZIP_FILE))) + $(verbose) rm -f $(abspath $(ESCRIPT_ZIP_FILE)) + $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(abspath $(ESCRIPT_ZIP_FILE)) $(PROJECT)/ebin/* ifneq ($(DEPS),) - $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \ + $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(abspath $(ESCRIPT_ZIP_FILE)) \ $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \ $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log))))) endif @@ -6918,11 +3223,11 @@ escript:: escript-zip "#!$(ESCRIPT_SHEBANG)" \ "%% $(ESCRIPT_COMMENT)" \ "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE) - $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE) + $(verbose) cat $(abspath $(ESCRIPT_ZIP_FILE)) >> $(ESCRIPT_FILE) $(verbose) chmod +x $(ESCRIPT_FILE) distclean-escript: - $(gen_verbose) rm -f $(ESCRIPT_FILE) + $(gen_verbose) rm -f $(ESCRIPT_FILE) $(abspath $(ESCRIPT_ZIP_FILE)) # Copyright (c) 2015-2016, Loïc Hoguin <[email protected]> # Copyright (c) 2014, Enrique Fernandez <[email protected]> @@ -6934,6 +3239,7 @@ distclean-escript: EUNIT_OPTS ?= EUNIT_ERL_OPTS ?= +EUNIT_TEST_SPEC ?= $1 # Core targets. @@ -6949,7 +3255,7 @@ help:: define eunit.erl $(call cover.erl) CoverSetup(), - case eunit:test($1, [$(EUNIT_OPTS)]) of + case eunit:test($(call EUNIT_TEST_SPEC,$1), [$(EUNIT_OPTS)]) of ok -> ok; error -> halt(2) end, @@ -6987,6 +3293,312 @@ apps-eunit: test-build endif endif +# Copyright (c) 2020, Loïc Hoguin <[email protected]> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +define hex_user_create.erl + {ok, _} = application:ensure_all_started(ssl), + {ok, _} = application:ensure_all_started(inets), + Config = $(hex_config.erl), + case hex_api_user:create(Config, <<"$(strip $1)">>, <<"$(strip $2)">>, <<"$(strip $3)">>) of + {ok, {201, _, #{<<"email">> := Email, <<"url">> := URL, <<"username">> := Username}}} -> + io:format("User ~s (~s) created at ~s~n" + "Please check your inbox for a confirmation email.~n" + "You must confirm before you are allowed to publish packages.~n", + [Username, Email, URL]), + halt(0); + {ok, {Status, _, Errors}} -> + io:format("Error ~b: ~0p~n", [Status, Errors]), + halt(80) + end +endef + +# The $(info ) call inserts a new line after the password prompt. 
+hex-user-create: hex-core + $(if $(HEX_USERNAME),,$(eval HEX_USERNAME := $(shell read -p "Username: " username; echo $$username))) + $(if $(HEX_PASSWORD),,$(eval HEX_PASSWORD := $(shell stty -echo; read -p "Password: " password; stty echo; echo $$password) $(info ))) + $(if $(HEX_EMAIL),,$(eval HEX_EMAIL := $(shell read -p "Email: " email; echo $$email))) + $(gen_verbose) $(call erlang,$(call hex_user_create.erl,$(HEX_USERNAME),$(HEX_PASSWORD),$(HEX_EMAIL))) + +define hex_key_add.erl + {ok, _} = application:ensure_all_started(ssl), + {ok, _} = application:ensure_all_started(inets), + Config = $(hex_config.erl), + ConfigF = Config#{api_key => iolist_to_binary([<<"Basic ">>, base64:encode(<<"$(strip $1):$(strip $2)">>)])}, + Permissions = [ + case string:split(P, <<":">>) of + [D] -> #{domain => D}; + [D, R] -> #{domain => D, resource => R} + end + || P <- string:split(<<"$(strip $4)">>, <<",">>, all)], + case hex_api_key:add(ConfigF, <<"$(strip $3)">>, Permissions) of + {ok, {201, _, #{<<"secret">> := Secret}}} -> + io:format("Key ~s created for user ~s~nSecret: ~s~n" + "Please store the secret in a secure location, such as a password store.~n" + "The secret will be requested for most Hex-related operations.~n", + [<<"$(strip $3)">>, <<"$(strip $1)">>, Secret]), + halt(0); + {ok, {Status, _, Errors}} -> + io:format("Error ~b: ~0p~n", [Status, Errors]), + halt(81) + end +endef + +hex-key-add: hex-core + $(if $(HEX_USERNAME),,$(eval HEX_USERNAME := $(shell read -p "Username: " username; echo $$username))) + $(if $(HEX_PASSWORD),,$(eval HEX_PASSWORD := $(shell stty -echo; read -p "Password: " password; stty echo; echo $$password) $(info ))) + $(gen_verbose) $(call erlang,$(call hex_key_add.erl,$(HEX_USERNAME),$(HEX_PASSWORD),\ + $(if $(name),$(name),$(shell hostname)-erlang-mk),\ + $(if $(perm),$(perm),api))) + +HEX_TARBALL_EXTRA_METADATA ?= + +# @todo Check that we can += files +HEX_TARBALL_FILES ?= \ + $(wildcard early-plugins.mk) \ + $(wildcard ebin/$(PROJECT).app) \ + $(wildcard ebin/$(PROJECT).appup) \ + $(wildcard $(notdir $(ERLANG_MK_FILENAME))) \ + $(sort $(call core_find,include/,*.hrl)) \ + $(wildcard LICENSE*) \ + $(wildcard Makefile) \ + $(wildcard plugins.mk) \ + $(sort $(call core_find,priv/,*)) \ + $(wildcard README*) \ + $(wildcard rebar.config) \ + $(sort $(if $(LEGACY),$(filter-out src/$(PROJECT).app.src,$(call core_find,src/,*)),$(call core_find,src/,*))) + +HEX_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT).tar + +# @todo Need to check for rebar.config and/or the absence of DEPS to know +# whether a project will work with Rebar. +# +# @todo contributors licenses links in HEX_TARBALL_EXTRA_METADATA + +# In order to build the requirements metadata we look into DEPS. +# We do not require that the project use Hex dependencies, however +# Hex.pm does require that the package name and version numbers +# correspond to a real Hex package. 
+define hex_tarball_create.erl + Files0 = [$(call comma_list,$(patsubst %,"%",$(HEX_TARBALL_FILES)))], + Requirements0 = #{ + $(foreach d,$(DEPS), + <<"$(if $(subst hex,,$(call query_fetch_method,$d)),$d,$(if $(word 3,$(dep_$d)),$(word 3,$(dep_$d)),$d))">> => #{ + <<"app">> => <<"$d">>, + <<"optional">> => false, + <<"requirement">> => <<"$(if $(hex_req_$d),$(strip $(hex_req_$d)),$(call query_version,$d))">> + },) + $(if $(DEPS),dummy => dummy) + }, + Requirements = maps:remove(dummy, Requirements0), + Metadata0 = #{ + app => <<"$(strip $(PROJECT))">>, + build_tools => [<<"make">>, <<"rebar3">>], + description => <<"$(strip $(PROJECT_DESCRIPTION))">>, + files => [unicode:characters_to_binary(F) || F <- Files0], + name => <<"$(strip $(PROJECT))">>, + requirements => Requirements, + version => <<"$(strip $(PROJECT_VERSION))">> + }, + Metadata = Metadata0$(HEX_TARBALL_EXTRA_METADATA), + Files = [case file:read_file(F) of + {ok, Bin} -> + {F, Bin}; + {error, Reason} -> + io:format("Error trying to open file ~0p: ~0p~n", [F, Reason]), + halt(82) + end || F <- Files0], + case hex_tarball:create(Metadata, Files) of + {ok, #{tarball := Tarball}} -> + ok = file:write_file("$(strip $(HEX_TARBALL_OUTPUT_FILE))", Tarball), + halt(0); + {error, Reason} -> + io:format("Error ~0p~n", [Reason]), + halt(83) + end +endef + +hex_tar_verbose_0 = @echo " TAR $(notdir $(ERLANG_MK_TMP))/$(@F)"; +hex_tar_verbose_2 = set -x; +hex_tar_verbose = $(hex_tar_verbose_$(V)) + +$(HEX_TARBALL_OUTPUT_FILE): hex-core app + $(hex_tar_verbose) $(call erlang,$(call hex_tarball_create.erl)) + +hex-tarball-create: $(HEX_TARBALL_OUTPUT_FILE) + +define hex_release_publish_summary.erl + {ok, Tarball} = erl_tar:open("$(strip $(HEX_TARBALL_OUTPUT_FILE))", [read]), + ok = erl_tar:extract(Tarball, [{cwd, "$(ERLANG_MK_TMP)"}, {files, ["metadata.config"]}]), + {ok, Metadata} = file:consult("$(ERLANG_MK_TMP)/metadata.config"), + #{ + <<"name">> := Name, + <<"version">> := Version, + <<"files">> := Files, + <<"requirements">> := Deps + } = maps:from_list(Metadata), + io:format("Publishing ~s ~s~n Dependencies:~n", [Name, Version]), + case Deps of + [] -> + io:format(" (none)~n"); + _ -> + [begin + #{<<"app">> := DA, <<"requirement">> := DR} = maps:from_list(D), + io:format(" ~s ~s~n", [DA, DR]) + end || {_, D} <- Deps] + end, + io:format(" Included files:~n"), + [io:format(" ~s~n", [F]) || F <- Files], + io:format("You may also review the contents of the tarball file.~n" + "Please enter your secret key to proceed.~n"), + halt(0) +endef + +define hex_release_publish.erl + {ok, _} = application:ensure_all_started(ssl), + {ok, _} = application:ensure_all_started(inets), + Config = $(hex_config.erl), + ConfigF = Config#{api_key => <<"$(strip $1)">>}, + {ok, Tarball} = file:read_file("$(strip $(HEX_TARBALL_OUTPUT_FILE))"), + case hex_api_release:publish(ConfigF, Tarball, [{replace, $2}]) of + {ok, {200, _, #{}}} -> + io:format("Release replaced~n"), + halt(0); + {ok, {201, _, #{}}} -> + io:format("Release published~n"), + halt(0); + {ok, {Status, _, Errors}} -> + io:format("Error ~b: ~0p~n", [Status, Errors]), + halt(84) + end +endef + +hex-release-tarball: hex-core $(HEX_TARBALL_OUTPUT_FILE) + $(verbose) $(call erlang,$(call hex_release_publish_summary.erl)) + +hex-release-publish: hex-core hex-release-tarball + $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info ))) + $(gen_verbose) $(call erlang,$(call hex_release_publish.erl,$(HEX_SECRET),false)) + +hex-release-replace: 
hex-core hex-release-tarball + $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info ))) + $(gen_verbose) $(call erlang,$(call hex_release_publish.erl,$(HEX_SECRET),true)) + +define hex_release_delete.erl + {ok, _} = application:ensure_all_started(ssl), + {ok, _} = application:ensure_all_started(inets), + Config = $(hex_config.erl), + ConfigF = Config#{api_key => <<"$(strip $1)">>}, + case hex_api_release:delete(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $(PROJECT_VERSION))">>) of + {ok, {204, _, _}} -> + io:format("Release $(strip $(PROJECT_VERSION)) deleted~n"), + halt(0); + {ok, {Status, _, Errors}} -> + io:format("Error ~b: ~0p~n", [Status, Errors]), + halt(85) + end +endef + +hex-release-delete: hex-core + $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info ))) + $(gen_verbose) $(call erlang,$(call hex_release_delete.erl,$(HEX_SECRET))) + +define hex_release_retire.erl + {ok, _} = application:ensure_all_started(ssl), + {ok, _} = application:ensure_all_started(inets), + Config = $(hex_config.erl), + ConfigF = Config#{api_key => <<"$(strip $1)">>}, + Params = #{<<"reason">> => <<"$(strip $3)">>, <<"message">> => <<"$(strip $4)">>}, + case hex_api_release:retire(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $2)">>, Params) of + {ok, {204, _, _}} -> + io:format("Release $(strip $2) has been retired~n"), + halt(0); + {ok, {Status, _, Errors}} -> + io:format("Error ~b: ~0p~n", [Status, Errors]), + halt(86) + end +endef + +hex-release-retire: hex-core + $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info ))) + $(gen_verbose) $(call erlang,$(call hex_release_retire.erl,$(HEX_SECRET),\ + $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION)),\ + $(if $(HEX_REASON),$(HEX_REASON),invalid),\ + $(HEX_MESSAGE))) + +define hex_release_unretire.erl + {ok, _} = application:ensure_all_started(ssl), + {ok, _} = application:ensure_all_started(inets), + Config = $(hex_config.erl), + ConfigF = Config#{api_key => <<"$(strip $1)">>}, + case hex_api_release:unretire(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $2)">>) of + {ok, {204, _, _}} -> + io:format("Release $(strip $2) is not retired anymore~n"), + halt(0); + {ok, {Status, _, Errors}} -> + io:format("Error ~b: ~0p~n", [Status, Errors]), + halt(87) + end +endef + +hex-release-unretire: hex-core + $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info ))) + $(gen_verbose) $(call erlang,$(call hex_release_unretire.erl,$(HEX_SECRET),\ + $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION)))) + +HEX_DOCS_DOC_DIR ?= doc/ +HEX_DOCS_TARBALL_FILES ?= $(sort $(call core_find,$(HEX_DOCS_DOC_DIR),*)) +HEX_DOCS_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT)-docs.tar.gz + +$(HEX_DOCS_TARBALL_OUTPUT_FILE): hex-core app docs + $(hex_tar_verbose) tar czf $(HEX_DOCS_TARBALL_OUTPUT_FILE) -C $(HEX_DOCS_DOC_DIR) \ + $(HEX_DOCS_TARBALL_FILES:$(HEX_DOCS_DOC_DIR)%=%) + +hex-docs-tarball-create: $(HEX_DOCS_TARBALL_OUTPUT_FILE) + +define hex_docs_publish.erl + {ok, _} = application:ensure_all_started(ssl), + {ok, _} = application:ensure_all_started(inets), + Config = $(hex_config.erl), + ConfigF = Config#{api_key => <<"$(strip $1)">>}, + {ok, Tarball} = file:read_file("$(strip $(HEX_DOCS_TARBALL_OUTPUT_FILE))"), + case hex_api:post(ConfigF, + ["packages", "$(strip $(PROJECT))", "releases", "$(strip 
$(PROJECT_VERSION))", "docs"], + {"application/octet-stream", Tarball}) of + {ok, {Status, _, _}} when Status >= 200, Status < 300 -> + io:format("Docs published~n"), + halt(0); + {ok, {Status, _, Errors}} -> + io:format("Error ~b: ~0p~n", [Status, Errors]), + halt(88) + end +endef + +hex-docs-publish: hex-core hex-docs-tarball-create + $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info ))) + $(gen_verbose) $(call erlang,$(call hex_docs_publish.erl,$(HEX_SECRET))) + +define hex_docs_delete.erl + {ok, _} = application:ensure_all_started(ssl), + {ok, _} = application:ensure_all_started(inets), + Config = $(hex_config.erl), + ConfigF = Config#{api_key => <<"$(strip $1)">>}, + case hex_api:delete(ConfigF, + ["packages", "$(strip $(PROJECT))", "releases", "$(strip $2)", "docs"]) of + {ok, {Status, _, _}} when Status >= 200, Status < 300 -> + io:format("Docs removed~n"), + halt(0); + {ok, {Status, _, Errors}} -> + io:format("Error ~b: ~0p~n", [Status, Errors]), + halt(89) + end +endef + +hex-docs-delete: hex-core + $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info ))) + $(gen_verbose) $(call erlang,$(call hex_docs_delete.erl,$(HEX_SECRET),\ + $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION)))) + # Copyright (c) 2015-2017, Loïc Hoguin <[email protected]> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -7026,8 +3638,8 @@ define proper_check.erl end of true -> halt(0); _ -> halt(1) - catch error:undef -> - io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]), + catch error:undef$(if $V,:Stacktrace) -> + io:format("Undefined property or module?~n$(if $V,~p~n)", [$(if $V,Stacktrace)]), halt(0) end. endef @@ -7092,10 +3704,13 @@ else define compile_proto.erl [begin gpb_compile:file(F, [ + $(foreach i,$(sort $(dir $(PROTO_FILES))),{i$(comma) "$i"}$(comma)) {include_as_lib, true}, {module_name_suffix, "_pb"}, {o_hrl, "./include"}, - {o_erl, "./src"}]) + {o_erl, "./src"}, + {use_packages, true} + ]) end || F <- string:tokens("$1", " ")], halt(). endef @@ -7112,15 +3727,14 @@ endif # Copyright (c) 2013-2016, Loïc Hoguin <[email protected]> # This file is part of erlang.mk and subject to the terms of the ISC License. +ifeq ($(filter relx,$(BUILD_DEPS) $(DEPS) $(REL_DEPS)),relx) .PHONY: relx-rel relx-relup distclean-relx-rel run # Configuration. -RELX ?= $(ERLANG_MK_TMP)/relx RELX_CONFIG ?= $(CURDIR)/relx.config +RELX_CONFIG_SCRIPT ?= $(CURDIR)/relx.config.script -RELX_URL ?= https://erlang.mk/res/relx-v3.27.0 -RELX_OPTS ?= RELX_OUTPUT_DIR ?= _rel RELX_REL_EXT ?= RELX_TAR ?= 1 @@ -7129,16 +3743,10 @@ ifdef SFX RELX_TAR = 1 endif -ifeq ($(firstword $(RELX_OPTS)),-o) - RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) -else - RELX_OPTS += -o $(RELX_OUTPUT_DIR) -endif - # Core targets. ifeq ($(IS_DEP),) -ifneq ($(wildcard $(RELX_CONFIG)),) +ifneq ($(wildcard $(RELX_CONFIG))$(wildcard $(RELX_CONFIG_SCRIPT)),) rel:: relx-rel relup:: relx-relup @@ -7149,21 +3757,81 @@ distclean:: distclean-relx-rel # Plugin-specific targets. 
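# A short sketch of enabling this plugin and the new script hook; the
# values below are only examples. Because the plugin is now gated on relx
# being listed as a dependency, a project would typically add:
#
#   REL_DEPS += relx
#
# and may optionally place a relx.config.script next to relx.config. The
# script is evaluated with the consulted relx.config terms bound to CONFIG
# and must return the final configuration, for example:
#
#   %% relx.config.script
#   case os:getenv("CI") of
#       false -> CONFIG;
#       _ -> lists:keystore(include_erts, 1, CONFIG, {include_erts, false})
#   end.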
-$(RELX): | $(ERLANG_MK_TMP) - $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) - $(verbose) chmod +x $(RELX) +define relx_get_config.erl + (fun() -> + Config0 = + case file:consult("$(call core_native_path,$(RELX_CONFIG))") of + {ok, Terms} -> + Terms; + {error, _} -> + [] + end, + case filelib:is_file("$(call core_native_path,$(RELX_CONFIG_SCRIPT))") of + true -> + Bindings = erl_eval:add_binding('CONFIG', Config0, erl_eval:new_bindings()), + {ok, Config1} = file:script("$(call core_native_path,$(RELX_CONFIG_SCRIPT))", Bindings), + Config1; + false -> + Config0 + end + end)() +endef + +define relx_release.erl + Config = $(call relx_get_config.erl), + {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config), + Vsn = case Vsn0 of + {cmd, Cmd} -> os:cmd(Cmd); + semver -> ""; + {semver, _} -> ""; + {git, short} -> string:trim(os:cmd("git rev-parse --short HEAD"), both, "\n"); + {git, long} -> string:trim(os:cmd("git rev-parse HEAD"), both, "\n"); + VsnStr -> Vsn0 + end, + {ok, _} = relx:build_release(#{name => Name, vsn => Vsn}, Config ++ [{output_dir, "$(RELX_OUTPUT_DIR)"}]), + halt(0). +endef + +define relx_tar.erl + Config = $(call relx_get_config.erl), + {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config), + Vsn = case Vsn0 of + {cmd, Cmd} -> os:cmd(Cmd); + semver -> ""; + {semver, _} -> ""; + {git, short} -> string:trim(os:cmd("git rev-parse --short HEAD"), both, "\n"); + {git, long} -> string:trim(os:cmd("git rev-parse HEAD"), both, "\n"); + VsnStr -> Vsn0 + end, + {ok, _} = relx:build_tar(#{name => Name, vsn => Vsn}, Config ++ [{output_dir, "$(RELX_OUTPUT_DIR)"}]), + halt(0). +endef + +define relx_relup.erl + Config = $(call relx_get_config.erl), + {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config), + Vsn = case Vsn0 of + {cmd, Cmd} -> os:cmd(Cmd); + semver -> ""; + {semver, _} -> ""; + {git, short} -> string:trim(os:cmd("git rev-parse --short HEAD"), both, "\n"); + {git, long} -> string:trim(os:cmd("git rev-parse HEAD"), both, "\n"); + VsnStr -> Vsn0 + end, + {ok, _} = relx:build_relup(Name, Vsn, undefined, Config ++ [{output_dir, "$(RELX_OUTPUT_DIR)"}]), + halt(0). +endef -relx-rel: $(RELX) rel-deps app - $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release +relx-rel: rel-deps app + $(call erlang,$(call relx_release.erl),-pa ebin/) $(verbose) $(MAKE) relx-post-rel -ifeq ($(RELX_TAR),1) - $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar -endif + $(if $(filter-out 0,$(RELX_TAR)),$(call erlang,$(call relx_tar.erl),-pa ebin/)) -relx-relup: $(RELX) rel-deps app - $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release +relx-relup: rel-deps app + $(call erlang,$(call relx_release.erl),-pa ebin/) $(MAKE) relx-post-rel - $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar) + $(call erlang,$(call relx_relup.erl),-pa ebin/) + $(if $(filter-out 0,$(RELX_TAR)),$(call erlang,$(call relx_tar.erl),-pa ebin/)) distclean-relx-rel: $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) @@ -7174,17 +3842,19 @@ relx-post-rel:: # Run target. 
-ifeq ($(wildcard $(RELX_CONFIG)),) +ifeq ($(wildcard $(RELX_CONFIG))$(wildcard $(RELX_CONFIG_SCRIPT)),) run:: else define get_relx_release.erl - {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"), + Config = $(call relx_get_config.erl), {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config), Vsn = case Vsn0 of {cmd, Cmd} -> os:cmd(Cmd); semver -> ""; {semver, _} -> ""; + {git, short} -> string:trim(os:cmd("git rev-parse --short HEAD"), both, "\n"); + {git, long} -> string:trim(os:cmd("git rev-parse HEAD"), both, "\n"); VsnStr -> Vsn0 end, Extended = case lists:keyfind(extended_start_script, 1, Config) of @@ -7204,6 +3874,7 @@ ifeq ($(PLATFORM),msys2) RELX_REL_EXT := .cmd endif +run:: RELX_TAR := 0 run:: all $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD) @@ -7211,7 +3882,7 @@ ifdef RELOAD rel:: $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \ - eval "io:format(\"~p~n\", [c:lm()])" + eval "io:format(\"~p~n\", [c:lm()])." endif help:: @@ -7220,6 +3891,7 @@ help:: " run Compile the project, build the release and run it" endif +endif # Copyright (c) 2015-2016, Loïc Hoguin <[email protected]> # Copyright (c) 2014, M Robert Martin <[email protected]> @@ -7368,8 +4040,8 @@ define triq_check.erl end of true -> halt(0); _ -> halt(1) - catch error:undef -> - io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]), + catch error:undef$(if $V,:Stacktrace) -> + io:format("Undefined property or module?~n$(if $V,~p~n)", [$(if $V,Stacktrace)]), halt(0) end. endef @@ -7391,45 +4063,224 @@ triq: test-build cover-data-dir endif endif -# Copyright (c) 2016, Loïc Hoguin <[email protected]> -# Copyright (c) 2015, Erlang Solutions Ltd. +# Copyright (c) 2022, Loïc Hoguin <[email protected]> # This file is part of erlang.mk and subject to the terms of the ISC License. -.PHONY: xref distclean-xref +.PHONY: xref # Configuration. -ifeq ($(XREF_CONFIG),) - XREFR_ARGS := -else - XREFR_ARGS := -c $(XREF_CONFIG) -endif +# We do not use locals_not_used or deprecated_function_calls +# because the compiler will error out by default in those +# cases with Erlang.mk. Deprecated functions may make sense +# in some cases but few libraries define them. We do not +# use exports_not_used by default because it hinders more +# than it helps library projects such as Cowboy. Finally, +# undefined_functions provides little that undefined_function_calls +# doesn't already provide, so it's not enabled by default. +XREF_CHECKS ?= [undefined_function_calls] + +# Instead of predefined checks a query can be evaluated +# using the Xref DSL. The $q variable is used in that case. + +# The scope is a list of keywords that correspond to +# application directories, being essentially an easy way +# to configure which applications to analyze. With: +# +# - app: . +# - apps: $(ALL_APPS_DIRS) +# - deps: $(ALL_DEPS_DIRS) +# - otp: Built-in Erlang/OTP applications. +# +# The default is conservative (app) and will not be +# appropriate for all types of queries (for example +# application_call requires adding all applications +# that might be called or they will not be found). +XREF_SCOPE ?= app # apps deps otp + +# If the above is not enough, additional application +# directories can be configured. +XREF_EXTRA_APP_DIRS ?= -XREFR ?= $(CURDIR)/xrefr -export XREFR +# As well as additional non-application directories. 
+XREF_EXTRA_DIRS ?= -XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr +# Erlang.mk supports -ignore_xref([...]) with forms +# {M, F, A} | {F, A} | M, the latter ignoring whole +# modules. Ignores can also be provided project-wide. +XREF_IGNORE ?= [] + +# All callbacks may be ignored. Erlang.mk will ignore +# them automatically for exports_not_used (unless it +# is explicitly disabled by the user). +XREF_IGNORE_CALLBACKS ?= # Core targets. help:: $(verbose) printf '%s\n' '' \ 'Xref targets:' \ - ' xref Run Xrefr using $$XREF_CONFIG as config file if defined' - -distclean:: distclean-xref + ' xref Analyze the project using Xref' \ + ' xref q=QUERY Evaluate an Xref query' # Plugin-specific targets. -$(XREFR): - $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) - $(verbose) chmod +x $(XREFR) - -xref: deps app $(XREFR) - $(gen_verbose) $(XREFR) $(XREFR_ARGS) +define xref.erl + {ok, Xref} = xref:start([]), + Scope = [$(call comma_list,$(XREF_SCOPE))], + AppDirs0 = [$(call comma_list,$(foreach d,$(XREF_EXTRA_APP_DIRS),"$d"))], + AppDirs1 = case lists:member(otp, Scope) of + false -> AppDirs0; + true -> + RootDir = code:root_dir(), + AppDirs0 ++ [filename:dirname(P) || P <- code:get_path(), lists:prefix(RootDir, P)] + end, + AppDirs2 = case lists:member(deps, Scope) of + false -> AppDirs1; + true -> [$(call comma_list,$(foreach d,$(ALL_DEPS_DIRS),"$d"))] ++ AppDirs1 + end, + AppDirs3 = case lists:member(apps, Scope) of + false -> AppDirs2; + true -> [$(call comma_list,$(foreach d,$(ALL_APPS_DIRS),"$d"))] ++ AppDirs2 + end, + AppDirs = case lists:member(app, Scope) of + false -> AppDirs3; + true -> ["../$(notdir $(CURDIR))"|AppDirs3] + end, + [{ok, _} = xref:add_application(Xref, AppDir, [{builtins, true}]) || AppDir <- AppDirs], + ExtraDirs = [$(call comma_list,$(foreach d,$(XREF_EXTRA_DIRS),"$d"))], + [{ok, _} = xref:add_directory(Xref, ExtraDir, [{builtins, true}]) || ExtraDir <- ExtraDirs], + ok = xref:set_library_path(Xref, code:get_path() -- (["ebin", "."] ++ AppDirs ++ ExtraDirs)), + Checks = case {$1, is_list($2)} of + {check, true} -> $2; + {check, false} -> [$2]; + {query, _} -> [$2] + end, + FinalRes = [begin + IsInformational = case $1 of + query -> true; + check -> + is_tuple(Check) andalso + lists:member(element(1, Check), + [call, use, module_call, module_use, application_call, application_use]) + end, + {ok, Res0} = case $1 of + check -> xref:analyze(Xref, Check); + query -> xref:q(Xref, Check) + end, + Res = case IsInformational of + true -> Res0; + false -> + lists:filter(fun(R) -> + {Mod, InMFA, MFA} = case R of + {InMFA0 = {M, _, _}, MFA0} -> {M, InMFA0, MFA0}; + {M, _, _} -> {M, R, R} + end, + Attrs = try + Mod:module_info(attributes) + catch error:undef -> + [] + end, + InlineIgnores = lists:flatten([ + [case V of + M when is_atom(M) -> {M, '_', '_'}; + {F, A} -> {Mod, F, A}; + _ -> V + end || V <- Values] + || {ignore_xref, Values} <- Attrs]), + BuiltinIgnores = [ + {eunit_test, wrapper_test_exported_, 0} + ], + DoCallbackIgnores = case {Check, "$(strip $(XREF_IGNORE_CALLBACKS))"} of + {exports_not_used, ""} -> true; + {_, "0"} -> false; + _ -> true + end, + CallbackIgnores = case DoCallbackIgnores of + false -> []; + true -> + Behaviors = lists:flatten([ + [BL || {behavior, BL} <- Attrs], + [BL || {behaviour, BL} <- Attrs] + ]), + [{Mod, CF, CA} || B <- Behaviors, {CF, CA} <- B:behaviour_info(callbacks)] + end, + WideIgnores = if + is_list($(XREF_IGNORE)) -> + [if is_atom(I) -> {I, '_', '_'}; true -> I end + || I <- $(XREF_IGNORE)]; + 
true -> [$(XREF_IGNORE)] + end, + Ignores = InlineIgnores ++ BuiltinIgnores ++ CallbackIgnores ++ WideIgnores, + not (lists:member(InMFA, Ignores) + orelse lists:member(MFA, Ignores) + orelse lists:member({Mod, '_', '_'}, Ignores)) + end, Res0) + end, + case Res of + [] -> ok; + _ when IsInformational -> + case Check of + {call, {CM, CF, CA}} -> + io:format("Functions that ~s:~s/~b calls:~n", [CM, CF, CA]); + {use, {CM, CF, CA}} -> + io:format("Function ~s:~s/~b is called by:~n", [CM, CF, CA]); + {module_call, CMod} -> + io:format("Modules that ~s calls:~n", [CMod]); + {module_use, CMod} -> + io:format("Module ~s is used by:~n", [CMod]); + {application_call, CApp} -> + io:format("Applications that ~s calls:~n", [CApp]); + {application_use, CApp} -> + io:format("Application ~s is used by:~n", [CApp]); + _ when $1 =:= query -> + io:format("Query ~s returned:~n", [Check]) + end, + [case R of + {{InM, InF, InA}, {M, F, A}} -> + io:format("- ~s:~s/~b called by ~s:~s/~b~n", + [M, F, A, InM, InF, InA]); + {M, F, A} -> + io:format("- ~s:~s/~b~n", [M, F, A]); + ModOrApp -> + io:format("- ~s~n", [ModOrApp]) + end || R <- Res], + ok; + _ -> + [case {Check, R} of + {undefined_function_calls, {{InM, InF, InA}, {M, F, A}}} -> + io:format("Undefined function ~s:~s/~b called by ~s:~s/~b~n", + [M, F, A, InM, InF, InA]); + {undefined_functions, {M, F, A}} -> + io:format("Undefined function ~s:~s/~b~n", [M, F, A]); + {locals_not_used, {M, F, A}} -> + io:format("Unused local function ~s:~s/~b~n", [M, F, A]); + {exports_not_used, {M, F, A}} -> + io:format("Unused exported function ~s:~s/~b~n", [M, F, A]); + {deprecated_function_calls, {{InM, InF, InA}, {M, F, A}}} -> + io:format("Deprecated function ~s:~s/~b called by ~s:~s/~b~n", + [M, F, A, InM, InF, InA]); + {deprecated_functions, {M, F, A}} -> + io:format("Deprecated function ~s:~s/~b~n", [M, F, A]); + _ -> + io:format("~p: ~p~n", [Check, R]) + end || R <- Res], + error + end + end || Check <- Checks], + stopped = xref:stop(Xref), + case lists:usort(FinalRes) of + [ok] -> halt(0); + _ -> halt(1) + end +endef -distclean-xref: - $(gen_verbose) rm -rf $(XREFR) +xref: deps app +ifdef q + $(verbose) $(call erlang,$(call xref.erl,query,"$q"),-pa ebin/) +else + $(verbose) $(call erlang,$(call xref.erl,check,$(XREF_CHECKS)),-pa ebin/) +endif # Copyright (c) 2016, Loïc Hoguin <[email protected]> # Copyright (c) 2015, Viktor Söderqvist <[email protected]> @@ -7441,6 +4292,7 @@ COVER_DATA_DIR ?= $(COVER_REPORT_DIR) ifdef COVER COVER_APPS ?= $(notdir $(ALL_APPS_DIRS)) COVER_DEPS ?= +COVER_EXCLUDE_MODS ?= endif # Code coverage for Common Test. @@ -7456,7 +4308,8 @@ $(TEST_DIR)/ct.cover.spec: cover-data-dir "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \ $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \ $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \ - '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@ + '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' \ + "{excl_mods, '$(PROJECT)', [$(call comma_list,$(COVER_EXCLUDE_MODS))]}." 
> $@ CT_RUN += -cover $(TEST_DIR)/ct.cover.spec endif @@ -7471,14 +4324,18 @@ define cover.erl Dirs = ["$(call core_native_path,$(CURDIR)/ebin)" $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)") $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")], - [begin - case filelib:is_dir(Dir) of - false -> false; - true -> - case cover:compile_beam_directory(Dir) of - {error, _} -> halt(1); - _ -> true - end + Excludes = [$(call comma_list,$(foreach e,$(COVER_EXCLUDE_MODS),"$e"))], + [case file:list_dir(Dir) of + {error, enotdir} -> false; + {error, _} -> halt(2); + {ok, Files} -> + BeamFiles = [filename:join(Dir, File) || + File <- Files, + not lists:member(filename:basename(File, ".beam"), Excludes), + filename:extension(File) =:= ".beam"], + case cover:compile_beam(BeamFiles) of + {error, _} -> halt(1); + _ -> true end end || Dir <- Dirs] end, @@ -7775,14 +4632,14 @@ list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: QUERY ?= name fetch_method repo version define query_target -$(1): $(2) clean-tmp-query.log +$1: $2 clean-tmp-query.log ifeq ($(IS_APP)$(IS_DEP),) - $(verbose) rm -f $(4) + $(verbose) rm -f $4 endif - $(verbose) $(foreach dep,$(3),\ - echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;) - $(if $(filter-out query-deps,$(1)),,\ - $(verbose) set -e; for dep in $(3) ; do \ + $(verbose) $(foreach dep,$3,\ + echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $4 ;) + $(if $(filter-out query-deps,$1),,\ + $(verbose) set -e; for dep in $3 ; do \ if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \ :; \ else \ @@ -7791,8 +4648,8 @@ endif fi \ done) ifeq ($(IS_APP)$(IS_DEP),) - $(verbose) touch $(4) - $(verbose) cat $(4) + $(verbose) touch $4 + $(verbose) cat $4 endif endef diff --git a/examples/chunked_hello_world/Makefile b/examples/chunked_hello_world/Makefile index 7e14a82..e168cf8 100644 --- a/examples/chunked_hello_world/Makefile +++ b/examples/chunked_hello_world/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/compress_response/Makefile b/examples/compress_response/Makefile index 8eab604..a798d8c 100644 --- a/examples/compress_response/Makefile +++ b/examples/compress_response/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/cookie/Makefile b/examples/cookie/Makefile index 563cd87..4ed1a37 100644 --- a/examples/cookie/Makefile +++ b/examples/cookie/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy erlydtl dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/echo_get/Makefile b/examples/echo_get/Makefile index 6332d4b..7be52c2 100644 --- a/examples/echo_get/Makefile +++ b/examples/echo_get/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/echo_post/Makefile b/examples/echo_post/Makefile index c1d64fe..86e5cd2 100644 --- a/examples/echo_post/Makefile +++ b/examples/echo_post/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/echo_post/src/toppage_h.erl b/examples/echo_post/src/toppage_h.erl index 1c6446a..de3143e 100644 --- a/examples/echo_post/src/toppage_h.erl +++ b/examples/echo_post/src/toppage_h.erl @@ -16,13 
+16,13 @@ maybe_echo(<<"POST">>, true, Req0) -> Echo = proplists:get_value(<<"echo">>, PostVals), echo(Echo, Req); maybe_echo(<<"POST">>, false, Req) -> - cowboy_req:reply(400, [], <<"Missing body.">>, Req); + cowboy_req:reply(400, #{}, <<"Missing body.">>, Req); maybe_echo(_, _, Req) -> %% Method not allowed. cowboy_req:reply(405, Req). echo(undefined, Req) -> - cowboy_req:reply(400, [], <<"Missing echo parameter.">>, Req); + cowboy_req:reply(400, #{}, <<"Missing echo parameter.">>, Req); echo(Echo, Req) -> cowboy_req:reply(200, #{ <<"content-type">> => <<"text/plain; charset=utf-8">> diff --git a/examples/eventsource/Makefile b/examples/eventsource/Makefile index 1b67b8e..636337a 100644 --- a/examples/eventsource/Makefile +++ b/examples/eventsource/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/file_server/Makefile b/examples/file_server/Makefile index 7efa6ef..12a4e15 100644 --- a/examples/file_server/Makefile +++ b/examples/file_server/Makefile @@ -2,7 +2,9 @@ PROJECT = file_server PROJECT_DESCRIPTION = Cowboy file server example with directory listing PROJECT_VERSION = 1 -DEPS = cowboy jsx +DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/file_server/priv/中文/中文.html b/examples/file_server/priv/中文/中文.html new file mode 100644 index 0000000..43ca75f --- /dev/null +++ b/examples/file_server/priv/中文/中文.html @@ -0,0 +1,8 @@ +<html> + <head> + <meta charset='utf-8'> + </head> + <body> + 中文! + </body> +</html> diff --git a/examples/file_server/src/directory_h.erl b/examples/file_server/src/directory_h.erl index 66e1466..b52fc74 100644 --- a/examples/file_server/src/directory_h.erl +++ b/examples/file_server/src/directory_h.erl @@ -8,6 +8,7 @@ -export([allowed_methods/2]). -export([resource_exists/2]). -export([content_types_provided/2]). +-export([charsets_provided/2]). %% Callback Callbacks -export([list_json/2]). @@ -31,12 +32,15 @@ content_types_provided(Req, State) -> {{<<"application">>, <<"json">>, []}, list_json} ], Req, State}. +charsets_provided(Req, State) -> + {[<<"utf-8">>], Req, State}. + list_json(Req, {Path, Fs}) -> - Files = [ <<(list_to_binary(F))/binary>> || F <- Fs ], - {jsx:encode(Files), Req, Path}. + Files = [unicode:characters_to_binary(F) || F <- Fs], + {json:encode(Files), Req, Path}. list_html(Req, {Path, Fs}) -> - Body = [[ links(Path, F) || F <- [".."|Fs] ]], + Body = [[links(Path, unicode:characters_to_binary(F)) || F <- [".."|Fs]]], HTML = [<<"<!DOCTYPE html><html><head><title>Index</title></head>", "<body>">>, Body, <<"</body></html>\n">>], {HTML, Req, Path}. 
diff --git a/examples/file_server/src/file_server_app.erl b/examples/file_server/src/file_server_app.erl index 17e73b2..0ba8f6c 100644 --- a/examples/file_server/src/file_server_app.erl +++ b/examples/file_server/src/file_server_app.erl @@ -15,7 +15,8 @@ start(_Type, _Args) -> {'_', [ {"/[...]", cowboy_static, {priv_dir, file_server, "", [ {mimetypes, cow_mimetypes, all}, - {dir_handler, directory_h} + {dir_handler, directory_h}, + {charset, <<"utf-8">>} ]}} ]} ]), diff --git a/examples/hello_world/Makefile b/examples/hello_world/Makefile index c410b09..a56432a 100644 --- a/examples/hello_world/Makefile +++ b/examples/hello_world/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/markdown_middleware/Makefile b/examples/markdown_middleware/Makefile index ccbd3c2..f5ea20e 100644 --- a/examples/markdown_middleware/Makefile +++ b/examples/markdown_middleware/Makefile @@ -2,7 +2,10 @@ PROJECT = markdown_middleware PROJECT_DESCRIPTION = Cowboy static file handler example with middleware component PROJECT_VERSION = 1 -DEPS = cowboy +DEPS = cowboy markdown dep_cowboy_commit = master +dep_markdown = git https://github.com/hypernumbers/erlmarkdown master + +REL_DEPS = relx include ../../erlang.mk diff --git a/examples/markdown_middleware/src/erlmarkdown.erl b/examples/markdown_middleware/src/erlmarkdown.erl deleted file mode 100644 index 6cab50d..0000000 --- a/examples/markdown_middleware/src/erlmarkdown.erl +++ /dev/null @@ -1,1581 +0,0 @@ -%%% -*- mode: Erlang; fill-column: 80; comment-column: 75; -*- -%%% Copyright 2012 Erlware, LLC. All Rights Reserved. -%%% -%%% This file is provided to you under the Apache License, -%%% Version 2.0 (the "License"); you may not use this file -%%% except in compliance with the License. You may obtain -%%% a copy of the License at -%%% -%%% http://www.apache.org/licenses/LICENSE-2.0 -%%% -%%% Unless required by applicable law or agreed to in writing, -%%% software distributed under the License is distributed on an -%%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -%%% KIND, either express or implied. See the License for the -%%% specific language governing permissions and limitations -%%% under the License. -%%%--------------------------------------------------------------------------- -%%% @copyright (C) Erlware, LLC. -%%% @copyright (C) 2009, Gordon Guthrie -%%% @doc --module(erlmarkdown). - --export([conv/1, - conv_utf8/1, - conv_file/2]). - --define(SPACE, 32). --define(TAB, 9). --define(LF, 10). --define(CR, 13). --define(NBSP, 160). --define(AMP, $&, $a, $m, $p, $;). --define(COPY, $&, $c, $o, $p, $y, $;). 
- -%%============================================================================ -%% API -%%============================================================================ - -%% the lexer first lexes the input -%% make_lines does 2 passes: -%% * it chops the lexed strings into lines which it represents as a -%% list of lists -%% * it then types the lines into the following: -%% * normal lines -%% * reference style links -%% * reference style images -%% * special line types -%% - blank -%% - SETEXT header lines -%% - ATX header lines -%% - blockquote -%% - unordered lists -%% - ordered lists -%% - code blocks -%% - horizontal rules -%% the parser then does its magic interpolating the references as appropriate -conv(String) -> - Lex = lex(String), - UntypedLines = make_lines(Lex), - {TypedLines, Refs} = type_lines(UntypedLines), - parse(TypedLines, Refs). - --spec conv_utf8(list()) -> list(). -conv_utf8(Utf8) -> - Str = xmerl_ucs:from_utf8(Utf8), - Res = conv(Str), - xmerl_ucs:to_utf8(Res). - -conv_file(FileIn, FileOut) -> - case file:open(FileIn, [read]) of - {ok, Device} -> - Input = get_all_lines(Device,[]), - Output = conv(Input), - write(FileOut, Output); - _ -> - error - end. - -%%============================================================================ -%% Internal Functions -%%============================================================================ - -get_all_lines(Device, Accum) -> - case io:get_line(Device,"") of - eof -> - file:close(Device), - Accum; - Line -> - get_all_lines(Device,Accum ++ Line) - end. - -write(File, Text) -> - _Return=filelib:ensure_dir(File), - case file:open(File, [write]) of - {ok, Id} -> - io:fwrite(Id, "~s~n", [Text]), - file:close(Id); - _ -> - error - end. - - -%% -%% Parse the lines interpolating the references as appropriate -%% - -parse(TypedLines, Refs) -> - string:strip(p1(TypedLines, Refs, 0, []), both, $\n). - -%% goes through the lines -%% Variable 'R' contains the References and 'I' is the indent level - -%% Terminal clause -p1([], _R, _I, Acc) -> - lists:flatten(lists:reverse(Acc)); - -%% Tags have the highest precedence... -p1([{tag, Tag} | T], R, I, Acc) -> - case T of - [] -> - p1([], R, I, ["</p>", make_tag_str(Tag, R), "<p>" | Acc]); - [{blank, _} | T2] -> - p1(T2, R, I, [make_tag_str(Tag, R) | Acc]); - _Other -> - p1(T, R, I, [pad(I) ++ make_tag_str(Tag, R) | Acc]) - end; - -p1([{blocktag, [{{{tag, open}, Type}, Tg}] = _Tag} | T], R, I, Acc) -> - {Block, Rest} = grab_for_blockhtml(T, Type, []), - Str = lists:flatten([Tg, "\n" | Block]), - p1(Rest, R, I, [Str | Acc]); - -%% blank lines/linefeeds are gobbled down -p1([{Type, _} | T], R, I, Acc) - when Type == blank orelse Type == linefeed -> - Rest = grab_empties(T), - p1(Rest, R, I, [pad(I) ++ "\n" | Acc]); - -%% two consecutive normal lines should be concatenated... -%% remembering the pad the second line with the indent... -p1([{normal, P1}, {normal, P2} | T], R, I, Acc) -> - p1([{normal, merge(P1, pad(I), P2)} | T], R, I, Acc); -%% as should a normal and linefeed - -%% setext h1 is a look behind and it overrides blockquote and code... 
-p1([{normal, P}, {setext_h1, _} | T], R, I, Acc) -> - p1(T, R, I, [pad(I) ++ "<h1>" ++ make_string(snip(P), R) - ++ "</h1>\n\n" | Acc]); -p1([{blockquote, P}, {setext_h1, _} | T], R, I, Acc) -> - p1(T, R, I, [pad(I) ++ "<h1>" ++ make_string(snip(P), R) - ++ "</h1>\n\n" | Acc]); -p1([{{codeblock, P}, _}, {setext_h1, _} | T], R, I, Acc) -> - p1(T, R, I, [pad(I) ++ "<h1>" ++ make_string(snip(P), R) - ++ "</h1>\n\n" | Acc]); -p1([{blockquote, P}, {h2_or_hr, _} | T], R, I, Acc) -> - p1(T, R, I, [pad(I) ++ "<h2>" ++ make_string(snip(P), R) - ++ "</h2>\n\n" | Acc]); -p1([{{codeblock, P}, _}, {h2_or_hr, _} | T], R, I, Acc) -> - p1(T, R, I, [pad(I) ++ "<h2>" ++ make_string(snip(P), R) - ++ "</h2>\n\n" | Acc]); - -%% but a setext with no lookbehind is just rendered as a normal line, -%% so change its type and rethrow it -p1([{setext_h1, P} | T], R, I, Acc) -> - p1([{normal, P} | T], R, I, Acc); - -%% setext h2 might be a look behind -p1([{normal, P}, {h2_or_hr, _} | T], R, I, Acc) -> - P2 = string:strip(make_string(snip(P), R), both, ?SPACE), - p1(T, R, I, [pad(I) ++ "<h2>" ++ P2 ++ "</h2>\n\n" | Acc]); - -%% blockquotes swallow each other -%% replace the first blockquote mark with a space... -p1([{blockquote, P1}, {blockquote, [_ | P2]} | T], R, I, Acc) -> - p1([{blockquote, merge(P1, pad(I), [{{ws, sp}, " "} | P2])} | T], R, I, Acc); -%% blockquotes swallow normal -p1([{blockquote, P1}, {normal, P2} | T], R, I, Acc) -> - p1([{blockquote, merge(P1, pad(I + 1), P2)} | T], R, I, Acc); -%% blockquote -p1([{blockquote, P} | T], R, I, Acc) -> - [{{md, gt}, _} | T1] = P, - T2 = string:strip(make_string(T1, R)), - p1(T, R, I, - ["\n<blockquote>\n" ++ pad(I + 1) ++ "<p>" ++ T2 ++ "</p>\n</blockquote>" | Acc]); - -%% one normal is just normal... -p1([{normal, P} | T], R, I, Acc) -> - P2 = string:strip(make_string(snip(P), R), both, ?SPACE), - p1(T, R, I, [pad(I) ++ "<p>" ++ P2 ++ "</p>\n" | Acc]); - -%% atx headings -p1([{{h1, P}, _} | T], R, I, Acc) -> - NewP = string:strip(make_string(snip(P), R), right), - p1(T, R, I, [pad(I) ++ "<h1>" ++ NewP ++ "</h1>\n\n" | Acc]); -p1([{{h2, P}, _} | T], R, I, Acc) -> - NewP = string:strip(make_string(snip(P), R), right), - p1(T, R, I, [pad(I) ++ "<h2>" ++ NewP ++ "</h2>\n\n" | Acc]); -p1([{{h3, P}, _} | T], R, I, Acc) -> - NewP = string:strip(make_string(snip(P), R), right), - p1(T, R, I, [pad(I) ++ "<h3>" ++ NewP ++ "</h3>\n\n" | Acc]); -p1([{{h4, P}, _} | T], R, I, Acc) -> - NewP = string:strip(make_string(snip(P), R), right), - p1(T, R, I, [pad(I) ++ "<h4>" ++ NewP ++ "</h4>\n\n" | Acc]); -p1([{{h5, P}, _} | T], R, I, Acc) -> - NewP = string:strip(make_string(snip(P), R), right), - p1(T, R, I, [pad(I) ++ "<h5>" ++ NewP ++ "</h5>\n\n" | Acc]); -p1([{{h6, P}, _} | T], R, I, Acc) -> - NewP = string:strip(make_string(snip(P), R), right), - p1(T, R, I, [pad(I) ++ "<h6>" ++ NewP ++ "</h6>\n\n" | Acc]); - -%% unordered lists swallow normal and codeblock lines -p1([{{ul, P1}, S1}, {{normal, P2}, S2} | T], R, I , Acc) -> - p1([{{ul, merge(P1, pad(I), P2)}, S1 ++ S2} | T], R, I, Acc); -p1([{{ul, P1}, S1}, {{codeblock, P2}, S2} | T], R, I , Acc) -> - p1([{{ul, merge(P1, pad(I), P2)}, S1 ++ S2} | T], R, I, Acc); -p1([{{ul, _P}, _} | _T] = List, R, I, Acc) -> - {Rest, NewAcc} = parse_list(ul, List, R, I, [], false), - p1(Rest, R, I, [pad(I) ++ "<ul>\n" ++ NewAcc - ++ pad(I) ++ "</ul>\n" | Acc]); - -%% ordered lists swallow normal and codeblock lines -p1([{{ol, P1}, S1}, {{normal, P2}, S2} | T], R, I , Acc) -> - p1([{{ol, merge(P1, pad(I), P2)}, S1 ++ S2} | T], R, I, Acc); 
-p1([{{ol, P1}, S1}, {{codeblock, P2}, S2} | T], R, I , Acc) -> - p1([{{ol, merge(P1, pad(I), P2)}, S1 ++ S2} | T], R, I, Acc); -p1([{{ol, _P}, _} | _T] = List, R, I, Acc) -> - {Rest, NewAcc} = parse_list(ol, List, R, I, [], false), - p1(Rest, R, I, [pad(I) ++ "<ol>\n" ++ NewAcc - ++ pad(I) ++ "</ol>\n" | Acc]); - -%% codeblock consumes any following empty lines -%% and other codeblocks -p1([{{codeblock, P1}, S1}, {{codeblock, P2}, S2} | T], R, I, Acc) -> - p1([{{codeblock, merge(P1, pad(I), P2)}, S1 ++ S2} | T], R, I, Acc); -p1([{{codeblock, P}, _} | T], R, I, Acc) -> - Rest = grab_empties(T), - p1(Rest, R, I, ["<pre><code>" ++ make_string(snip(P), R) - ++ "\n</code></pre>\n\n" | Acc]); - -%% horizontal rules -p1([{hr, _} | T], R, I, Acc) -> - p1(T, R, I, ["<hr />" | Acc]); -%% h2_or_hr is greedy for normal lines -p1([{h2_or_hr, P1}, {normal, P2} | T], R, I, Acc) -> - p1([{normal, lists:flatten([P1 | P2])} | T], R, I, Acc); -%% the clause with a normal before an 'h2_or_hr' has already been -%% handled further up the tree, so this is a bona fide 'hr'... -p1([{h2_or_hr, _} | T], R, I, Acc) -> - p1(T, R, I, ["<hr />" | Acc]); - -%% Now start pulling out inline refs etc, etc -p1([{inlineref, _P} | T], R, I, Acc) -> - p1(T, R, I, Acc). - -grab_for_blockhtml([], Type, Acc) -> - {lists:reverse(["</" ++ Type ++ ">" | Acc]), []}; -grab_for_blockhtml([{blocktag, [{{{tag, close}, Type}, Tg}]} - | T], Type, Acc) -> - {lists:reverse([Tg | Acc]), T}; -grab_for_blockhtml([{blocktag, [{{{tag, _}, GrabType}, Tg}]} - | T], Type, Acc) when GrabType =/= Type -> - %% blocktags grabbed in a blocktag need a line ending pushed - grab_for_blockhtml(T, Type, ["\n", Tg | Acc]); -grab_for_blockhtml([{tag, {{{tag, self_closing}, _Ty}, Tg}} - | T], Type, Acc) -> - grab_for_blockhtml(T, Type, [Tg | Acc]); -grab_for_blockhtml([H | T], Type, Acc) -> - {_Type, Content} = H, - Str = make_plain_string(Content), - grab_for_blockhtml(T, Type, [Str | Acc]). - -grab_empties([{linefeed, _} | T]) -> grab_empties(T); -grab_empties([{blank, _} | T]) -> grab_empties(T); -grab_empties(List) -> List. - -merge(P1, Pad, P2) -> - NewP1 = make_br(P1), - lists:flatten([NewP1, {string, Pad} | P2]). - -make_br(List) -> make_br1(lists:reverse(List)). - -make_br1([{{lf, _}, _}, - {{ws, comp}, _} | T]) -> lists:reverse([{tags, " <br />\n"} | T]); -make_br1([{{lf, _}, _}, - {{ws, tab}, _} | T]) -> lists:reverse([{tags, " <br />\n"} | T]); -make_br1(List) -> lists:reverse(List). - -pad(N) -> - pad1(N, []). - -pad1(0, Acc) -> - Acc; -pad1(N, Acc) - when N > 0 -> - pad1(N - 1, [" " | Acc]). - -%% this is a bit messy because of the way that hard lines are treated... 
-%% If your li's have a blank line between them the item gets wrapped in a para, -%% if not, they don't -%% BUT if one item is <p> wrapped then the next is too -parse_list(_Type, [], _R, _I, A, _) -> - {[], lists:reverse(A)}; -parse_list(Type, [{{Type, P}, _} | T], R, I, A, Wrap) -> - {Rest, NewP, NewWrap} = grab(T, R, [], Wrap), - Li = - case NewWrap of - false -> - Ret = parse([{normal, P}], R), - %% need to strip off the extra <p></p>'s - Ret2 = string:left(Ret, length(Ret) - 4), - Ret3 = string:right(Ret2, length(Ret2) -3), - Ret3 ++ "\n" ++ NewP ++ pad(I); - true -> - string:strip(parse([{normal, P}], R), right, ?LF) - ++ NewP ++ pad(I) - end, - NewWrap2 = - case T of - [] -> - false; % doesnt matter - [H2 | _T2] -> - case H2 of - {linefeed, _} -> - true; - _ -> - false - end - end, - parse_list(Type, Rest, R, I, [pad(I) ++ "<li>" - ++ string:strip(Li, right, ?LF) - ++ "</li>\n" | A], NewWrap2); -parse_list(_Type, List, _R, _I, A, _) -> - {List, lists:reverse(A)}. - -%% grab grabs normals, double codeblocks, linefeeds and blanks -%% BUT stop grabbing if a normal if preceeded by a linefeed or blank -%% UNLESS the normal starts with white space :( -%% the third return parameter is 'true' if the 'li' should be -%% wrapped in '<p></p>' and false if it shouldn't -grab([{{codeblock, _}, S} | T] = List, R, Acc, W) -> - case is_blockquote(S, T) of - {{true, R1}, T2} -> - grab(T2, R, ["</blockquote>", - make_escape_string(R1, R), - "<blockquote>" | Acc], W); - {{esc_false, R1}, _T2} -> - {R1, lists:reverse(Acc), false}; - {false, T2} -> - case is_double_indent(S) of - false -> - {List, lists:reverse(Acc), false}; - {true, R2} -> - %% if it is a double indent - delete 4 spaces - %% no it makes not sense to me neither :( - grab(T2, R, [" " ++ make_escape_string(R2, R) | Acc], W) - end - end; -grab([{linefeed, _} | T], R, Acc, false) -> - grab2(T, R, Acc, T, Acc, true); -grab([{linefeed, _} | T], R, Acc, true) -> - grab2(T, R, ["\n" | Acc], T, Acc, true); -grab([{blank, _} | T], R, Acc, false) -> - grab2(T, R, Acc, T, Acc, true); -grab([{blank, _} | T], R, Acc, true) -> - grab2(T, R, ["\n" | Acc], T, Acc, true); -grab([{normal, P} | T], R, Acc, W) -> - Li = case W of - false -> - make_escape_string(P, R); - true -> - "<p>"++ string:strip(make_escape_string(P, R), right, ?LF) - ++ "</p>" - end, - grab(T, R, [Li | Acc], W); -grab(List, _R, Acc, W) -> - {List, lists:reverse(Acc), W}. - -%% the problem is knowing when to grab, if the list is followed by a long -%% string of blank lines and linefeeds and a normal then the linefeeds aren't -%% grabbed -%% if the list if followed by blank lines and linefeeds and a normal with an -%% initial whitespace it is grabbed... -grab2([{normal, P2} | T], R, Acc, LO, AO, W) -> - case P2 of - [{{ws, _}, _} | T2] -> - Li = case W of - false -> - make_escape_string(T2, R); - true -> - "<p>" ++ - string:strip(make_escape_string(T2, R), right, ?LF) - ++ "</p>" - end, - grab(T, R, [Li | Acc], W); - _ -> - {LO, AO, false} - end; -grab2([{linefeed, _} | T], R, Acc, LO, AO, _W) -> - grab2(T, R, ["\n" | Acc], LO, AO, true); -grab2([{blank, _} | T], R, Acc, LO, AO, _W) -> - grab2(T, R, ["\n" | Acc], LO, AO, true); -%% We dont want to grab this stuff so return the old list and the old acc -grab2(_List, _R, _Acc, LO, AO, _W) -> - {LO, AO, true}. - -is_double_indent(List) -> - is_double_indent1(List, 0). 
- -%% double indent is any combination of tabs and spaces that add -%% up to 8 -is_double_indent1([], _N) -> - false; -is_double_indent1(Rest, N) - when N > 7 -> - {true, Rest}; -is_double_indent1([{{ws, sp}, _} | T], N) -> - is_double_indent1(T, N + 1); -is_double_indent1([{{ws, tab}, _} | T], N) -> - is_double_indent1(T, N + 4); -is_double_indent1(_List, _N) -> - false. - -is_blockquote(List, T) -> - case is_bq1(List, 0) of - false -> - {false, T}; - {esc_false, R} -> - {{esc_false, R}, T}; - {true, R} -> - {NewT, NewR} = grab2(T, R), - {{true, NewR}, NewT} - end. - -is_bq1([], _N) -> - false; -is_bq1([{{ws, sp}, _} | T], N) -> - is_bq1(T, N + 1); -is_bq1([{{ws, tab}, _} | T], N) -> - is_bq1(T, N + 4); -is_bq1([{{md, gt}, _}, {{ws, _}, _} | T], N) - when N > 3 -> - {true, T}; -is_bq1([{{punc, bslash}, _}, {{md, gt}, GT}, {{ws, _}, WS} | T], N) - when N > 3 -> - {esc_false, [GT, WS | T]}; -is_bq1(_List, _N) -> - false. - -grab2(List, R) -> - gb2(List, lists:reverse(R)). - -gb2([], Acc) -> - {[], lists:flatten(lists:reverse(Acc))}; -gb2([{blank, _} | T], Acc) -> - {T, lists:flatten(lists:reverse(Acc))}; -gb2([{_Type, P} | T], Acc) -> - gb2(T, [P | Acc]). - -%% -%% Make the lines from the raw tokens -%% -make_lines(Tokens) -> - ml1(Tokens, [], []). - -ml1([], [], A2) -> - lists:reverse(A2); -ml1([], A1, A2) -> - ml1([], [], [lists:reverse(A1) | A2]); -ml1([{{lf, _}, _} = H | T], A1, A2) -> - ml1(T, [], [ml2(H, A1) | A2]); -ml1([H | T], A1, A2) -> - ml1(T, [H | A1], A2). - -ml2(H, List) -> - lists:reverse([H | List]). - -%%% -%%% Process the lines and give each line a type. The valid types are: -%%% * normal line -%%% * reference style links -%%% * reference style images -%%% * special line types -%%% - blank -%%% - SETEXT header lines -%%% - ATX header lines -%%% - unordered lists (including code blocks) -%%% - ordered lists (including code blocks) -%%% - blockquotes -%%% - code blocks -%%% - horizontal rules -%%% -type_lines(Lines) -> - {Refs, TypedLines} = type_lines1(Lines, [], []), - {strip_lines(TypedLines), Refs}. - -type_lines1([], A1, A2) -> - {A1, lists:reverse(A2)}; -type_lines1([[{{ws, sp}, _}, {{inline, open}, _} | T1] = H | T2], A1, A2) -> - %% this clause extracts URL and Image refs - %% (it is the only one that uses A1 and A2... - %% inlines can have up to 3 spaces before it - t_inline(H, T1, T2, A1, A2); -type_lines1([[{{ws, tab}, _}, {{inline, open}, _} | T1] = H | T2], A1, A2) -> - t_inline(H, T1, T2, A1, A2); -type_lines1([[{{ws, comp}, W}, {{inline, open}, _} | T1] = H | T2], A1, A2) -> - case gt(W, 3) of - {true, _R} -> - t_inline(H, T1, T2, A1, A2); - false -> - type_lines1(T1, A1, [{normal , H} | A2]) % same exit at the final clause! - end, - t_inline(H, T1, T2, A1, A2); -type_lines1([[{{inline, open}, _} | T1] = H | T2], A1, A2) -> - t_inline(H, T1, T2, A1, A2); -type_lines1([[{{md, eq}, _} | _T] = H | T], A1, A2) -> - %% types setext lines - type_lines1(T, A1, [type_setext_h1(H) | A2]); -type_lines1([[{{md, dash}, _} | _T] = H | T], A1, A2) -> - %% NOTE 1: generates a ul as the default not a normal line - %% NOTE 2: depending on the context this might generate an <h2> header - %% or an <hr /> - %% NOTE 3: space - is typed to a bullet down in <ul> land... 
- type_lines1(T, A1, [type_setext_h2(H) | A2]); -type_lines1([[{{md, atx}, _} | _T] = H | T], A1, A2) -> - %% types atx lines - type_lines1(T, A1, [type_atx(H) | A2]); -type_lines1([[{{md, gt}, _} | []] = H | T], A1, A2) -> - %% types blockquotes - %% a blockquote on its own or followed by a linefeed is - %% displayed 'as is' by showdown - type_lines1(T, A1, [{normal, H} | A2]); -type_lines1([[{{md, gt}, _}, {{lf, _}, _} | []] = H | T], A1, A2) -> - type_lines1(T, A1, [{normal, H} | A2]); -%% one with anything after it starts a blockquote -type_lines1([[{{md, gt}, _} | _T1] = H | T], A1, A2) -> - type_lines1(T, A1, [{blockquote, H} | A2]); -type_lines1([[{{ws, _}, _}, {{md, star}, _} = ST1, {{ws, _}, _} = WS1 | T1] = H | T], A1, A2) -> - %% types unordered lists lines - %% NOTE 1: the dashed version is generated in type_setext_h2 - %% NOTE 2: the asterix version also might generate a horizontal rule - %% which is why it jumps to type_star2 <-- note the 2!! - type_lines1(T, A1, [{type_star2([ST1, WS1 | T1]), H} | A2]); -type_lines1([[{{md, star}, _}, {{ws, _}, _} | _T1] = H | T], A1, A2) -> - type_lines1(T, A1, [{type_star2(H), H} | A2]); -type_lines1([[{{ws, _}, _}, {{md, plus}, _}, {{ws, _}, _} = W | T1] = H | T], A1, A2) -> - type_lines1(T, A1, [{{ul, make_list_str([W | T1])}, H} | A2]); -type_lines1([[{{md, plus}, _}, {{ws, _}, _} = W | T1] = H | T], A1, A2) -> - type_lines1(T, A1, [{{ul, make_list_str([W | T1])}, H} | A2]); -%% UL based on dashes -type_lines1([[{{ws, _}, _}, {{md, dash}, _}, {{ws, _}, _} = W | T1] = H | T], A1, A2) -> - type_lines1(T, A1, [{{ul, make_list_str([W | T1])}, H} | A2]); -type_lines1([[{{ws, _}, _}, {num, _} = N1| T1] | T], A1, A2) -> - %% types ordered lists... - type_lines1(T, A1, [type_ol([N1 | T1]) | A2]); -type_lines1([[{num, _} | _T] = H | T], A1, A2) -> - type_lines1(T, A1, [type_ol(H) | A2]); -type_lines1([[{{md, underscore}, _} | _T1] = H | T], A1, A2) -> - %% types horizontal rules for stars and underscores - %% dashes and some stars are done elsewhere... - type_lines1(T, A1, [type_underscore(H) | A2]); -type_lines1([[{{md, star}, _} | _T1] = H | T], A1, A2) -> - type_lines1(T, A1, [type_star(H) | A2]); -type_lines1([[{{{tag, _Type}, Tag}, _ } = H | T1] = List | T], A1, A2) -> - %% Block level tags - these are look ahead they must be - %% on a single line (ie directly followed by a lf and nothing else - case is_blank(T1) of - false -> - type_lines1(T, A1, [{normal , List} | A2]); - true -> - case is_block_tag(Tag) of - true -> - type_lines1(T, A1, [{blocktag , [H]} | A2]); - false -> - type_lines1(T, A1, [{tag, [H | T1]} | A2]) - end - end; -type_lines1([[{{lf, _}, _}| []] = H | T], A1, A2) -> - %% types a blank line or a code block - type_lines1(T, A1, [{linefeed, H} | A2]); -type_lines1([[{{ws, _}, _} | _T1] = H | T], A1, A2) -> - type_lines1(T, A1, [type_ws(H) | A2]); - -%% Final clause... -type_lines1([H | T], A1, A2) -> - type_lines1(T, A1, [{normal , H} | A2]). - -t_inline(H, T1, T2, A1, A2) -> - case snip_ref(T1) of - {Type, {Id, {Url, Title}}} -> - type_lines1(T2, lists:flatten([{Id, {Url, Title}} | A1]), - [{Type, H} | A2]); - normal -> - type_lines1(T2, A1, [{normal, H} | A2]) - end. - -%% strips blanks from the beginning and end -strip_lines(List) -> - lists:reverse(strip_lines1(lists:reverse(strip_lines1(List)))). - -strip_lines1([{linefeed, _} | T]) -> - strip_lines1(T); -strip_lines1([{blank, _} | T]) -> - strip_lines1(T); -strip_lines1(List) -> - List. - -%% -%% Loads of type rules... 
-%% -is_blank([]) -> - true; -is_blank([{{lf, _}, _} | []]) -> - true; -is_blank([{{ws, _}, _} | T]) -> - is_blank(T); -is_blank(_List) -> - false. - -is_block_tag("address") -> - true; -is_block_tag("blockquote") -> - true; -is_block_tag("center") -> - true; -is_block_tag("dir") -> - true; -is_block_tag("div") -> - true; -is_block_tag("dl") -> - true; -is_block_tag("fieldset") -> - true; -is_block_tag("form") -> - true; -is_block_tag("h1") -> - true; -is_block_tag("h2") -> - true; -is_block_tag("h3") -> - true; -is_block_tag("h4") -> - true; -is_block_tag("h5") -> - true; -is_block_tag("h6") -> - true; -is_block_tag("hr") -> - true; -is_block_tag("isindex") -> - true; -is_block_tag("menu") -> - true; -is_block_tag("noframes") -> - true; -is_block_tag("noscript") -> - true; -is_block_tag("ol") -> - true; -is_block_tag("p") -> - true; -is_block_tag("pre") -> - true; -is_block_tag("table") -> - true; -is_block_tag("thead") -> - true; -is_block_tag("tbody") -> - true; -is_block_tag("tr") -> - true; -is_block_tag("td") -> - true; -is_block_tag("ul") -> - true; -is_block_tag(_Other) -> - false. - -type_underscore(List) -> - case type_underscore1(trim_right(List)) of - hr -> - {hr, List}; - maybe -> - {type_underscore2(List), List} - end. - -type_underscore1([]) -> - hr; -type_underscore1([{{md, underscore}, _} | T]) -> - type_underscore1(T); -type_underscore1(_List) -> - maybe. - -type_underscore2(List) -> - case trim_right(List) of % be permissive of trailing spaces - [{{md, underscore}, _}, {{ws, _}, _}, - {{md, underscore}, _}, {{ws, _}, _}, - {{md, underscore}, _}] -> - hr; - _Other -> - normal - end. - -type_star(List) -> - Trim = trim_right(List), - case type_star1(Trim) of % be permssive of trailing spaces - hr -> - {hr, trim_right(Trim)}; - maybe -> - Type = type_star2(List), - %% if it is a normal line we prepend it with a special - %% non-space filling white space character - case Type of - normal -> - {normal, [{{ws, none}, none} | List]}; - _ -> - {Type, List} - end - end. - -type_star1([]) -> - hr; -type_star1([{{md, star}, _} | T]) -> - type_star1(T); -type_star1(_List) -> - maybe. - -type_star2(List) -> - case trim_right(List) of - [{{md, star}, _}, {{ws, _}, _}, - {{md, star}, _}, {{ws, _}, _}, - {{md, star}, _}] -> - hr; - _Other -> - case List of - [{{md, star}, _}, - {{ws, _}, _}= WS | T] -> - {ul, make_list_str([WS | T])}; - _Other2 -> - normal - end - end. - -type_ol(List) -> - case type_ol1(List, []) of - normal -> - {normal, List}; - {ol, Str} -> - {{ol, Str}, List}; - {esc_normal, Str} -> - {normal, Str} - end. - -%% this line terminates on an escaped fullstop after a number -%% (but you need to drop the bslash...) -type_ol1([{num, _} = N, - {{punc, bslash}, _}, - {{punc, fullstop}, _} = P | T], Acc) -> - {esc_normal, lists:flatten([lists:reverse(Acc), N, P | T])}; -type_ol1([{num, _} = H | T], Acc) -> - %% we accumulate the digits in case we need to escape a full stop in a normal line - type_ol1(T, [H | Acc]); -type_ol1([{{punc, fullstop}, _}, {{ws, _}, _} | T], _Acc) -> - {ol, T}; -type_ol1(_List, _Acc) -> - normal. - -%% You need to understand what this function is trying to d... -%% '### blah' is fine -%% '### blah ###' is reduced to '### blah' because trailing #'s are -%% just for show but... 
-%% '##' is like appling '#' to '#' <-- applying 1 less styling to a single # -%% and '###' is like appling '##' to '#' etc, etc -%% but after you hit 6#'s you just get this for a single hash -%% ie '#############' is like applying '######' to a single '#' -%% but/and '######## blah' is like apply '######' to '## blah' -%% strip trailing #'s as they are decorative only... -type_atx(List) -> - {Sz, R} = get_atx_size(List), - A = [{{md, atx}, "#"}], - Type = - case is_all_hashes(R) of - true -> - if - Sz == 1 -> - normal; - ((Sz > 1) andalso (Sz < 6)) -> - Ns = integer_to_list(Sz - 1), - Hn = list_to_atom("h" ++ Ns), - {Hn, A}; - ((Sz == 6) andalso (R == [])) -> - {h5, A}; - ((Sz == 6) andalso (R == [{{lf, lf}, "\n"}])) -> - {h5, A}; - ((Sz == 6) andalso (R == [{{lf, crlf}, "\r\n"}])) -> - {h5, A}; - ((Sz == 6) andalso (R =/= [])) -> - {h6, A} - end; - false -> - Ns = integer_to_list(Sz), - Hn = list_to_atom("h" ++ Ns), - {Hn, strip_atx(R)} - end, - {Type, List}. - -is_all_hashes([]) -> - true; -is_all_hashes([{{md, atx}, _} | T]) -> - is_all_hashes(T); -is_all_hashes([{{lf, _}, _} | []]) -> - true; -is_all_hashes(_List) -> - false. - -get_atx_size(List) -> - g_atx_size1(List, 0). - - % this function also strips whitespace to the left... -g_atx_size1([{{md, atx}, _} = A | T], N) when N == 6 -> - {6, [A | T]}; -g_atx_size1([{{md, atx}, _} | T], N) -> - g_atx_size1(T, N + 1); -g_atx_size1([{{ws, _}, _} | T], N) -> - g_atx_size1(T, N); -g_atx_size1(List, N) -> - {N, List}. - -strip_atx(List) -> - lists:reverse(s_atx1(lists:reverse(List))). - -s_atx1([{{lf, _}, _}, {{md, atx}, _} | T]) -> - s_atx1(T); -s_atx1([{{md, atx}, _} | T]) -> - s_atx1(T); -s_atx1(List) -> - List. - -type_setext_h1(List) -> - type_s_h1_1(List, []). - -%% terminates on running out or new line -type_s_h1_1([{{lf, _}, _} = L | []], Acc) -> - {setext_h1, lists:reverse([L | Acc])}; -type_s_h1_1([], Acc)-> - {setext_h1, lists:reverse(Acc)}; -type_s_h1_1([[] | T], Acc) -> - type_s_h1_1(T, Acc); -type_s_h1_1([{{md, eq}, _} = H | T], Acc) -> - type_s_h1_1(T, [H | Acc]); -type_s_h1_1(L, Acc) -> - {normal, lists:flatten([Acc | L])}. - -type_setext_h2(List) -> - case type_s_h2_1(List) of - h2_or_hr -> - {h2_or_hr, List}; - not_h2 -> - {type_s_h2_2(trim_right(List)), List} - end. -%% terminates on running out or new line -type_s_h2_1([{{lf, _}, _} | []]) -> - h2_or_hr; -type_s_h2_1([]) -> - h2_or_hr; -type_s_h2_1([[] | T]) -> - type_s_h2_1(T); -type_s_h2_1([{{md, dash}, _} | T]) -> - type_s_h2_1(T); -type_s_h2_1(_L) -> - not_h2. - -type_s_h2_2([{{md, dash}, _}, {{ws,_}, _}, - {{md, dash}, _}, {{ws, _}, _}, - {{md, dash}, _}]) -> - hr; -type_s_h2_2([{{md, dash}, _}, - {{ws, _}, _} = WS | T]) -> - {ul, make_list_str([WS | T])}; -type_s_h2_2(_List) -> - normal. - -type_ws(List) -> - case type_ws1(List) of - blank -> - {blank, List}; - try_codeblock -> - case type_ws2(List) of - normal -> - {normal, List}; - {codeblock, Ret} -> - {{codeblock, Ret}, List} - end - end. - -type_ws1([]) -> - blank; -type_ws1([{{lf, _}, _} | []]) -> - blank; -type_ws1([[] | T]) -> - type_ws1(T); -type_ws1([{{ws, _}, _} | T]) -> - type_ws1(T); -type_ws1(_L) -> - try_codeblock. - -%% 4 or more spaces takes you over the limit -%% (a tab is 4...) -type_ws2([{{ws, tab}, _} | T]) -> - {codeblock, T}; -type_ws2([{{ws, comp}, W} | T]) -> - case gt(W, 4) of - {true, R} -> - {codeblock, [R| T]}; - false -> - normal - end; -type_ws2([{{ws, sp}, _} | _T]) -> - normal. 
- -gt(String, Len) -> - ExpString = re:replace(String, "\t", " ", [{return, list}]), - ExpStringLen = length(ExpString), - if - ExpStringLen >= Len -> - WS = string:substr(ExpString, Len + 1, - ExpStringLen), - {true, {{ws, sp}, WS}}; - ExpStringLen < Len -> - false - end. - -%% make a tag into a string -make_tag_str(L, R) -> - make_tag1(L, R, []). - -make_tag1([], _R, Acc) -> - lists:reverse(Acc); -make_tag1([{{{tag, _Type}, _Tag}, B} | T], R, Acc) -> - make_tag1(T, R, [B | Acc]); -make_tag1([H | T], R, Acc) -> - make_tag1(T, R, [make_string([H], R) | Acc]). - -esc_tag(String) -> - esc_tag1(String, []). - -esc_tag1([], Acc) -> - lists:reverse(Acc); -esc_tag1([?NBSP | T], Acc) -> - esc_tag1(T, [?SPACE | Acc]); % non-breaking space to space -esc_tag1([H | T], Acc) -> - esc_tag1(T, [H | Acc]). - -%% if it is a list we need to discard the initial white space... -make_list_str([{{ws, _}, _} | T] = List) -> - case is_double_indent(List) of - false -> - T; - {true, R} -> - lists:flatten([{tags, "<pre><code>"} ,R , - {tags, "</code></pre>\n\n"} | []]) - end. - -%% All ref processing can ignore the original values 'cos those -%% have already been captured at a higher level -snip_ref(List) -> - case get_id(List) of - {[{_, Id}], Rest} -> - {_Rest2, Ref, Title} = parse_inline(Rest), - Ref2 = trim(Ref), - Rs = htmlencode(make_plain_string(Ref2)), - Ts = make_plain_string(Title), - {inlineref, {Id, {Rs, Ts}}}; - normal -> - normal - end. - -get_id(List) -> - g_id1(List, []). - -g_id1([], _Acc) -> - normal; -g_id1([{{inline, close}, _}, {{punc, colon}, _}, {{ws, _}, _} | T], Acc) -> - {lists:reverse(Acc), T}; -g_id1([H | T], Acc) -> - g_id1(T, [H | Acc]). - -parse_inline(List) -> - parse_inline1(List, []). - -%% snip off the terminal linefeed (if there is one...) -parse_inline1([{{lf, _}, _} | []], A) -> - {[], lists:reverse(A), []}; -parse_inline1([], A) -> - {[], lists:reverse(A), []}; -parse_inline1([{{punc, bslash}, _}, {bra, _} = B | T], A) -> - %% brackets can be escaped - parse_inline1(T, [B | A]); -parse_inline1([{{punc, bslash}, _}, {ket, _} = B | T], A) -> - parse_inline1(T, [B | A]); -parse_inline1([{{punc, bslash}, _}, {{punc, doubleq}, _} = Q | T], A) -> - parse_inline1(T, [Q | A]); -parse_inline1([{{punc, bslash}, _}, {{punc, singleq}, _} = Q | T], A) -> - parse_inline1(T, [Q | A]); -parse_inline1([{{punc, doubleq}, _} | T], A) -> - %% these clauses capture the start of the title... - parse_inline2(T, lists:reverse(A), doubleq, []); -parse_inline1([{{punc, singleq}, _} | T], A) -> - parse_inline2(T, lists:reverse(A), singleq, []); -parse_inline1([{bra, _} | T], A) -> - parse_inline2(T, lists:reverse(A), brackets, []); -parse_inline1([{ket, _} | T], A) -> - {T, lists:reverse(A), []}; -parse_inline1([H | T], A) -> - parse_inline1(T, [H | A]). 
- -%% this gets titles in single and double quotes -%% the delimiter type is passed in as 'D' -parse_inline2([], Url, _D, A) -> - {[], Url, lists:flatten(lists:reverse(A))}; - -parse_inline2([{{punc, bslash}, _}, {bra, _} = B | T], Url, D, A) -> - %% brackets can be escaped - parse_inline2(T, Url, D, [B | A]); -parse_inline2([{{punc, bslash}, _}, {ket, _} = B | T], Url, D, A) -> - parse_inline2(T, Url, D, [B | A]); -parse_inline2([{{punc, bslash}, _}, - {{punc, doubleq}, _}= Q | T], Url, D, A) -> - %% quotes can be escaped - parse_inline2(T, Url, D, [Q | A]); -parse_inline2([{{punc, bslash}, _}, {{punc, singleq}, _} = Q | T], Url, D, A) -> - parse_inline2(T, Url, D, [Q | A]); -parse_inline2([{{punc, doubleq}, _} | T], Url, doubleq, A) -> - %% these clauses capture the end of the title and drop the delimiter... - parse_inline2(T, Url, none, A); -parse_inline2([{{punc, singleq}, _} | T], Url, singleq, A) -> - parse_inline2(T, Url, none, A); -parse_inline2([{ket, _} | T], Url, brackets, A) -> - parse_inline2(T, Url, none, A); -parse_inline2([{ket, _} | T], Url, none, A) -> - %% terminator clause - {T, Url, lists:flatten(lists:reverse(A))}; -parse_inline2([_H | T], Url, none, A) -> - %% this clause silently discards stuff after the delimiter... - parse_inline2(T, Url, none, [A]); -parse_inline2([H | T], Url, D, A) -> - parse_inline2(T, Url, D, [H | A]). - -trim(String) -> - trim_left(trim_right(String)). - -trim_right(String) -> - lists:reverse(trim_left(lists:reverse(String))). - -trim_left([{{ws, _}, _} | T]) -> - trim_left(T); -trim_left([[] | T]) -> - trim_left(T); -trim_left(List) -> - List. - -snip(List) -> - List2 = lists:reverse(List), - case List2 of - [{{lf, _}, _} | T] -> - lists:reverse(T); - _ -> - List - end. - -%% end of ref processing - -%% -%% Build the Lexed Token List -%% This is a two part lexer, first it chunks the input and then on the second -%% pass it gathers it into lines and types the lines -%% -%% NOTE that there are two different styles of processing lines: -%% * markdown transformed -%% * block -%% inside block processing the whole text is dumped and just url encoded -%% and the original text is always maintained during the lexing/parsing -%% so that it can be recreated if the context requires it... -%% - -lex(String) -> - merge_ws(lex1(String, [], [])). - -merge_ws(List) -> - merge_ws1(List, []). - -merge_ws1([], Acc) -> - lists:reverse(Acc); -merge_ws1([{{ws, _}, W1}, {{ws, _}, W2} | T], Acc) -> - merge_ws1([{{ws, comp}, W1 ++ W2} | T], Acc); -merge_ws1([H | T], Acc) -> - merge_ws1(T, [H | Acc]). - -%% this is the terminal head which ends the parsing... 
-lex1([], [], A2) -> - lists:flatten(lists:reverse(A2)); -lex1([], A1, A2) -> - lex1([], [], [lex2(A1) | A2]); -%% these two heads capture opening and closing tags -lex1([$<, $/|T], A1, A2) -> - {Tag, NewT} = closingdiv(T, []), - lex1(NewT, [], [Tag, lex2(A1) | A2]); -lex1([$< | T], A1, A2) -> - {Tag, NewT} = openingdiv(T), - lex1(NewT, [], [Tag , lex2(A1) | A2]); -%% these clauses are the normal lexer clauses -lex1([$= | T], A1, A2) -> - lex1(T, [], [{{md, eq}, "="}, lex2(A1) | A2]); -lex1([$- | T], A1, A2) -> - lex1(T, [], [{{md, dash}, "-"}, lex2(A1) | A2]); -lex1([$# | T], A1, A2) -> - lex1(T, [], [{{md, atx}, "#"}, lex2(A1) | A2]); -lex1([$> | T], A1, A2) -> - lex1(T, [], [{{md, gt}, ">"}, lex2(A1) | A2]); -lex1([$+ | T], A1, A2) -> - lex1(T, [], [{{md, plus}, "+"}, lex2(A1) | A2]); -lex1([$* | T], A1, A2) -> - lex1(T, [], [{{md, star}, "*"}, lex2(A1) | A2]); -lex1([$_ | T], A1, A2) -> - lex1(T, [], [{{md, underscore}, "_"}, lex2(A1) | A2]); -lex1([$1 | T], A1, A2) -> - lex1(T, [], [{num, "1"}, lex2(A1) | A2]); -lex1([$2 | T], A1, A2) -> - lex1(T, [], [{num, "2"}, lex2(A1) | A2]); -lex1([$3 | T], A1, A2) -> - lex1(T, [], [{num, "3"}, lex2(A1) | A2]); -lex1([$4 | T], A1, A2) -> - lex1(T, [], [{num, "4"}, lex2(A1) | A2]); -lex1([$5 | T], A1, A2) -> - lex1(T, [], [{num, "5"}, lex2(A1) | A2]); -lex1([$6 | T], A1, A2) -> - lex1(T, [], [{num, "6"}, lex2(A1) | A2]); -lex1([$7 | T], A1, A2) -> - lex1(T, [], [{num, "7"}, lex2(A1) | A2]); -lex1([$8 | T], A1, A2) -> - lex1(T, [], [{num, "8"}, lex2(A1) | A2]); -lex1([$9 | T], A1, A2) -> - lex1(T, [], [{num, "9"}, lex2(A1) | A2]); -lex1([$0 | T], A1, A2) -> - lex1(T, [], [{num, "0"}, lex2(A1) | A2]); -lex1([$. | T], A1, A2) -> - lex1(T, [], [{{punc, fullstop}, "."}, lex2(A1) | A2]); -lex1([$: | T], A1, A2) -> - lex1(T, [], [{{punc, colon}, ":"}, lex2(A1) | A2]); -lex1([$' | T], A1, A2) -> - lex1(T, [], [{{punc, singleq}, "'"}, lex2(A1) | A2]); %' -lex1([$" | T], A1, A2) -> - lex1(T, [], [{{punc, doubleq}, "\""}, lex2(A1) | A2]); %" -lex1([$` | T], A1, A2) -> - lex1(T, [], [{{punc, backtick}, "`"}, lex2(A1) | A2]); %" -lex1([$! | T], A1, A2) -> - lex1(T, [], [{{punc, bang}, "!"}, lex2(A1) | A2]); %" -lex1([$\\ | T], A1, A2)-> - lex1(T, [], [{{punc, bslash}, "\\"}, lex2(A1) | A2]); %" -lex1([$/ | T], A1, A2) -> - lex1(T, [], [{{punc, fslash}, "/"}, lex2(A1) | A2]); %" -lex1([$( | T], A1, A2) -> - lex1(T, [], [{bra, "("}, lex2(A1) | A2]); -lex1([$) | T], A1, A2) -> - lex1(T, [], [{ket, ")"}, lex2(A1) | A2]); -lex1([$[ | T], A1, A2) -> - lex1(T, [], [{{inline, open}, "["}, lex2(A1) | A2]); -lex1([$] | T], A1, A2) -> - lex1(T, [], [{{inline, close}, "]"}, lex2(A1) | A2]); -lex1([?SPACE | T], A1, A2) -> - %% note there is a special 'whitespace' {{ws, none}, ""} which is used to generate non-space - %% filling whitespace for cases like '*bob* is great' which needs a non-space filling - %% whitespace prepended to trigger emphasis so it renders as "<em>bob</em> is great... - %% that 'character' doesn't exist so isn't in the lexer but appears in the parser - lex1(T, [], [{{ws, sp}, " "}, lex2(A1) | A2]); -lex1([?TAB | T], A1, A2) -> - lex1(T, [], [{{ws, tab}, "\t"}, lex2(A1) | A2]); -lex1([?NBSP | T], A1, A2) -> - lex1(T, [], [{{ws, sp}, " "}, lex2(A1) | A2]); -lex1([?CR, ?LF | T], A1, A2) -> - lex1(T, [], [{{lf, crlf}, [?CR , ?LF]}, lex2(A1) | A2]); -lex1([?LF | T], A1, A2) -> - lex1(T, [], [{{lf, lf}, [?LF]}, lex2(A1) | A2]); -lex1([H|T], A1, A2) -> - %% this final clause accumulates line fragments - lex1(T, [H |A1] , A2). 
- -lex2([]) -> - []; -lex2(List) -> - {string, lists:flatten(lists:reverse(List))}. - -%% need to put in regexes for urls and e-mail addies -openingdiv(String) -> - case get_url(String) of - {{url, URL}, R1} -> - {{url, URL}, R1}; - not_url -> - case get_email_addie(String) of - {{email, EM}, R2} -> - {{email, EM}, R2}; - not_email -> - openingdiv1(String, []) - end - end. - - % dumps out a list if it is not an opening div -openingdiv1([], Acc) -> - {lists:flatten([{{punc, bra}, "<"} | lex(lists:reverse(Acc))]), []}; -openingdiv1([$/,$>| T], Acc) -> - Acc2 = lists:flatten(lists:reverse(Acc)), - Acc3 = string:to_lower(Acc2), - [Tag | _T] = string:tokens(Acc3, " "), - {{{{tag, self_closing}, Tag}, "<" - ++ Acc2 ++ "/>"}, T}; -openingdiv1([$>| T], []) -> - %% special for non-tags - {[{{punc, bra}, "<"}, - {{punc, ket}, ">"}], T}; -openingdiv1([$>| T], Acc) -> - Acc2 = lists:flatten(lists:reverse(Acc)), - Acc3 = string:to_lower(Acc2), - [Tag | _T] = string:tokens(Acc3, " "), - {{{{tag, open}, Tag}, "<" - ++ Acc2 ++ ">"}, T}; -openingdiv1([H|T], Acc) -> - openingdiv1(T, [H | Acc]). - - % dumps out a list if it is not an closing div -closingdiv([], Acc) -> - {lists:flatten([{{punc, bra}, "<"}, {{punc, fslash}, "/"} - | lex(lists:reverse(Acc))]), []}; -closingdiv([$>| T], Acc) -> - Acc2 = lists:flatten(lists:reverse(Acc)), - Acc3 = string:to_lower(Acc2), - [Tag | _T] = string:tokens(Acc3, " "), - {{{{tag, close}, Tag}, "</" - ++ Acc2 ++ ">"}, T}; -closingdiv([H|T], Acc) -> - closingdiv(T, [H | Acc]). - -get_url(String) -> - HTTP_regex = "^(H|h)(T|t)(T|t)(P|p)(S|s)*://", - case re:run(String, HTTP_regex) of - nomatch -> - not_url; - {match, _} -> - get_url1(String, []) - end. - -get_url1([], Acc) -> - URL = lists:flatten(lists:reverse(Acc)), - {{url, URL}, []}; -get_url1([$\\, $> | T], Acc) -> - %% allow escaped kets - get_url1(T, [$>, $\\ | Acc]); -get_url1([$> | T], Acc) -> - URL = lists:flatten(lists:reverse(Acc)), - {{url, URL}, T}; -get_url1([H | T], Acc) -> - get_url1(T, [H | Acc]). - -get_email_addie(String) -> - Snip_regex = ">", - case re:run(String, Snip_regex) of - nomatch -> - not_email; - {match, [{N, _} | _T]} -> - {Possible, [$> | T]} = lists:split(N, String), - EMail_regex = "[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+" - ++ "(?:\.[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+)*" - ++ "@(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+" - ++ "(?:[a-zA-Z]{2}|com|org|net|gov|mil" - ++ "|biz|info|mobi|name|aero|jobs|museum)", - case re:run(Possible, EMail_regex) of - nomatch -> - not_email; - {match, _} -> - {{email, Possible}, T} - end - end. - -make_plain_string(List) -> - make_plain_string(List, []). - -make_plain_string([], Acc) -> - lists:flatten(lists:reverse(Acc)); -make_plain_string([{{ws, none}, none} | T], Acc) -> - make_plain_string(T, [" " | Acc]); -make_plain_string([{_, Str} | T], Acc) -> - make_plain_string(T, [Str | Acc]). - -make_escape_string(List, Refs) -> - make_escape_string(List, Refs, []). - -make_escape_string([], _R, A) -> - lists:flatten(lists:reverse(A)); -make_escape_string([{tags, Tag} | T], R, A) -> - make_escape_string(T, R, [{tags, Tag} | A]); -make_escape_string([H | T], R, A) -> - make_escape_string(T, R, [make_string([H], R) | A]). - - -make_string(List, Refs) -> - make_string(List, Refs, []). 
- -make_string([], _R, A) -> - Flat = lists:flatten(lists:reverse(A)), - htmlchars(Flat); -make_string([{{punc, bang}, B}, {{inline, open}, O} | T], R, A) -> - case get_inline(T, R, [], img) of - {Rest, {Url, Title, Acc}} -> - Tag = [make_img_tag(Url, Acc, Title)], - make_string(Rest, R, [Tag | A]); - {Rest, Tag} -> - make_string(Rest, R, [Tag, O, B | A]) - end; -%% escape inline open's... -make_string([{{punc, bslash}, _}, {{inline, open}, O} | T], R, A) -> - make_string(T, R, [O | A]); -make_string([{{inline, open}, O} | T], R, A) -> - case get_inline(T, R, [], url) of - {Rest, {Url, Title, Acc}} -> - Tit = case Title of - [] -> - []; - _ -> - " title=\"" ++ Title ++ "\"" - end, - Tag = [{tags, "<a href=\"" ++ Url ++ "\"" - ++ Tit ++ ">"}, Acc, - {tags, "</a>"} | []], - make_string(Rest, R, [Tag | A]); - {Rest, Tag} -> - make_string(Rest, R, [Tag, O | A]) - end; -make_string([{email, Addie} | T], R, A) -> - make_string(T, R, [{tags, "\" />"}, Addie, {tags, "<a href=\"mailto:"}| A]); -make_string([{url, Url} | T], R, A) -> - make_string(T, R, [ {tags, "</a>"}, Url, {tags, "\">"}, Url, - {tags, "<a href=\""} | A]); -make_string([{tags, _} = Tag | T], R, A) -> - make_string(T, R, [Tag | A]); -make_string([{{{tag, Type}, Tag}, _} | T], R, A) -> - Tag2 = esc_tag(Tag), - TagStr = case Type of - open -> - {tags, "<" ++ Tag2 ++ ">"}; - close -> - {tags, "</" ++ Tag2 ++ ">"}; - self_closing -> - {tags, "<" ++ Tag2 ++ " />"} - end, - make_string(T, R, [TagStr | A]); -make_string([{_, Orig} | T], R, A) -> - make_string(T, R, [Orig | A]). - -get_inline([], _R, A, _) -> - %% if the inline doesn't terminate its not an inline... - {[], make_plain_string(lists:reverse(A))}; -get_inline([{{punc, bang}, _B}, {{inline, open}, _O} | T], R, A, url) -> - %% a url can contain an image inline - {Rest, {Url, Title, Acc}} = get_inline(T, R, A, img), - Tag = make_img_tag(Url, Acc, Title), - %% We double tag the tag so that it can get through the lists:flatteners.. - get_inline(Rest, R, [{tags, Tag} | A], url); -get_inline([{{inline, close}, _}, {bra, _} | T], _R, A, _) -> - {Rest, Url, Title} = parse_inline(T), - Tag = {string:strip(make_plain_string(Url)), - make_plain_string(Title), - make_plain_string(lists:reverse(A))}, - {Rest, Tag}; -get_inline([{{inline, close}, _}, {{ws, sp}, _}, {bra, _} | T], _R, A, img) -> - %% for img's but not url's you need to allow a single space between them - %% to be compatible with showdown :( - {Rest, Url, Title} = parse_inline(T), - Tag = {string:strip(make_plain_string(Url)), - make_plain_string(Title), - make_plain_string(lists:reverse(A))}, - {Rest, Tag}; -get_inline([{{inline, close}, _}, {{inline, open}, _} | T], R, A, _) -> - %% this clause detects references to images/links... 
- Text = make_plain_string(lists:reverse(A)), - case get_id_diff(T) of - normal -> - {[], make_plain_string(lists:reverse(A))}; - {[{_, Id}], Rest} -> - {Url, Title} = case lists:keyfind(Id, 1, R) of - false -> - {"", ""}; - {Id, {U, Tit}} -> - {U, Tit} - end, - Tag = {Url, Title, Text}, - {Rest, Tag}; - _Other -> - {[], make_plain_string(lists:reverse(A))} % random failing id's - end; -get_inline([{{inline, close}, _} = C , {{ws, _}, _}, - {{inline, open}, _} = O | T], R, A, Type) -> - %% so does this one - just delete the space and rethrow it - get_inline([C, O | T], R, A, Type); -get_inline([{{inline, close}, _} | T], R, A, _) -> - %% this is the markdown extension clause that takes an id in square brackets without - %% any additional stuff as a valid id marker - Id = make_plain_string(lists:reverse(A)), - case lists:keyfind(Id, 1, R) of - false -> - {T, lists:flatten([Id , $]])}; - {Id, {Url, Title}} -> - Tag = {Url, Title, Id}, - {T, Tag} - end; -get_inline([H | T], R, A, Type) -> - get_inline(T, R, [H | A], Type). - -get_id_diff(List) -> - g_id_diff1(List, []). - -g_id_diff1([], _Acc) -> - normal; -g_id_diff1([{{inline, close}, _}| T], Acc) -> - {lists:reverse(Acc), T}; -g_id_diff1([H | T], Acc) -> - g_id_diff1(T, [H | Acc]). - -%% convert ascii into html characters -htmlencode(List) -> - htmlencode(List, []). - -htmlencode([], Acc) -> - lists:flatten(lists:reverse(Acc)); - -htmlencode([$& | Rest], Acc) -> - htmlencode(Rest, ["&" | Acc]); -htmlencode([$< | Rest], Acc) -> - htmlencode(Rest, ["<" | Acc]); -htmlencode([$> | Rest], Acc) -> - htmlencode(Rest, [">" | Acc]); -htmlencode([160 | Rest], Acc) -> - htmlencode(Rest, [" " | Acc]); -htmlencode([Else | Rest], Acc) -> - htmlencode(Rest, [Else | Acc]). - -htmlchars(List) -> - htmlchars1(List, []). - -htmlchars1([], Acc) -> - lists:flatten(lists:reverse(Acc)); -htmlchars1([{tags, Tag} | T], Acc) -> - %% tags are just wheeched out unescaped - htmlchars1(T, [Tag | Acc]); -htmlchars1([?CR, ?LF | T], Acc) -> - %% line ends are pushed to a space.. - htmlchars1(T, ["\n" | Acc]); -htmlchars1([?LF | T], Acc) -> - %% line ends are pushed to a space.. - htmlchars1(T, ["\n" | Acc]); -htmlchars1([?CR | T], Acc) -> - htmlchars1(T, ["\r" | Acc]); -htmlchars1([$\\, $*, $*, $* | T], A) -> - %% there is a non-space filling white space represented by the atom 'none' - %% which is created in the parser (NOT IN THE LEXER!) and which triggers - %% emphasis or strong tags being turned on... 
- htmlchars1(T, [$*, $*, $* | A]); -htmlchars1([$*, $*, $* | T], A) -> - {T2, NewA} = superstrong(T, $*), - htmlchars1(T2, [NewA | A]); -htmlchars1([$\\, $*, $* | T], A) -> - %% repeat for strong - htmlchars1(T, [$*, $* | A]); -htmlchars1([$*, $* | T], A) -> - {T2, NewA} = strong(T, $*), - htmlchars1(T2, [NewA | A]); -htmlchars1([$\\, $* | T], A) -> - %% likewise for strong - htmlchars1(T, [$* | A]); -htmlchars1([$* | T], A) -> - {T2, NewA} = emphasis(T, $*), - htmlchars1(T2, [NewA | A]); -htmlchars1([$\\, $_, $_, $_ | T], A) -> - %% and again for underscores - htmlchars1(T, [$_, $_, $_ | A]); -htmlchars1([$_, $_, $_ | T], A) -> - %% the none atom is the non-space filling whitespace - {T2, NewA} = superstrong(T, $_), - htmlchars1(T2, [NewA | A]); -htmlchars1([$\\, $_, $_ | T], A) -> - %% and strong - %% and again for underscores - htmlchars1(T, [$_, $_ | A]); -htmlchars1([$_, $_ | T], A) -> - {T2, NewA} = strong(T, $_), - htmlchars1(T2, [NewA | A]); -htmlchars1([$\\, $_ | T], A) -> - %% likewise for strong - htmlchars1(T, [$_ | A]); -htmlchars1([$_ | T], A) -> - {T2, NewA} = emphasis(T, $_), - htmlchars1(T2, [NewA | A]); -htmlchars1([$\\, $` | T], A) -> - %% handle backtick escaping - htmlchars1(T, [$` | A]); -htmlchars1([$`, $` | T], A) -> - {T2, NewA} = dblcode(T), - htmlchars1(T2, [NewA | A]); -htmlchars1([$` | T], A) -> - {T2, NewA} = code(T), - htmlchars1(T2, [NewA | A]); -htmlchars1([?COPY | T], A) -> - htmlchars1(T, ["©" | A]); -htmlchars1([?AMP | T], A) -> - htmlchars1(T, ["&" | A]); -htmlchars1([$& | T], A) -> - htmlchars1(T, ["&" | A]); -htmlchars1([$< | T], A) -> - htmlchars1(T, ["<" | A]); -htmlchars1([?NBSP | T], A) -> - htmlchars1(T, [" " | A]); -htmlchars1([?TAB | T], A) -> - htmlchars1(T, [" " | A]); -htmlchars1([none | T], A) -> - htmlchars1(T, A); -htmlchars1([H | T], A) -> - htmlchars1(T, [H | A]). - -emphasis(List, Delim) -> - interpolate(List, Delim, "em", "" ,[]). -strong(List, Delim) -> - interpolate2(List, Delim, "strong", "", []). -superstrong(List, Delim) -> - interpolate3(List, Delim, "strong", "em", "", []). -dblcode(List) -> - {T, Tag} = interpolate2(List, $`, "code", "" ,[]), - {T, "<pre>" ++ Tag ++ "</pre>"}. -code(List) -> - interpolateX(List, $`, "code", "", []). - -%% pain in the arse - sometimes the closing tag should be preceded by -%% a "\n" and sometimes not in showdown.js -%% interpolate is for single delimiters... -interpolateX([], Delim, _Tag, _X, Acc) -> - {[], [Delim] ++ htmlchars(lists:reverse(Acc))}; -interpolateX([Delim | T], Delim, Tag, X, Acc) -> - {T, "<" ++ Tag ++ ">" ++ htmlchars(lists:reverse(Acc)) ++ X ++ - "</" ++ Tag ++ ">"}; -interpolateX([H | T], Delim, Tag, X, Acc) -> - interpolateX(T, Delim, Tag, X, [H | Acc]). - -interpolate([], Delim, _Tag, _X, Acc) -> - {[], [Delim] ++ htmlchars(lists:reverse(Acc))}; -interpolate([Delim | T], Delim, Tag, X, Acc) -> - {T, "<" ++ Tag ++ ">" ++ htmlchars(lists:reverse(Acc)) ++ X ++ - "</" ++ Tag ++ ">"}; -interpolate([H | T], Delim, Tag, X, Acc) -> - interpolate(T, Delim, Tag, X, [H | Acc]). - -%% interpolate two is for double delimiters... -interpolate2([], Delim, _Tag, _X, Acc) -> - {[], [Delim] ++ [Delim] ++ htmlchars(lists:reverse(Acc))}; -interpolate2([Delim, Delim | T], Delim, Tag, X, Acc) -> - {T, "<" ++ Tag ++ ">" ++ htmlchars(lists:reverse(Acc)) ++ X ++ - "</" ++ Tag ++ ">"}; -interpolate2([H | T], Delim, Tag, X, Acc) -> - interpolate2(T, Delim, Tag, X, [H | Acc]). - -%% interpolate three is for double delimiters... 
-interpolate3([], D, _Tag1, Tag2, _X, Acc) -> - {[], "<" ++ Tag2 ++ ">" ++ [D] ++ "</" ++ Tag2 ++ ">" - ++ htmlchars(lists:reverse(Acc))}; -interpolate3([D, D, D | T], D, Tag1, Tag2, _X, Acc) -> - {T, "<" ++ Tag1 ++ ">" ++ "<" ++ Tag2 ++ ">" - ++ htmlchars(lists:reverse(Acc)) ++ "</" ++ Tag2 ++ ">" - ++ "</" ++ Tag1 ++ ">"}; -interpolate3([H | T], D, Tag1, Tag2, X, Acc) -> - interpolate3(T, D, Tag1, Tag2, X, [H | Acc]). - -make_img_tag(Url, Acc, Title) -> - {tags, "<img src=\"" ++ Url ++ "\"" - ++ " alt=\"" ++ Acc ++ "\"" - ++ " title=\"" ++ Title ++ "\"" - ++ " />"}. diff --git a/examples/markdown_middleware/src/markdown_converter.erl b/examples/markdown_middleware/src/markdown_converter.erl index 988de9d..222d77c 100644 --- a/examples/markdown_middleware/src/markdown_converter.erl +++ b/examples/markdown_middleware/src/markdown_converter.erl @@ -17,7 +17,7 @@ maybe_generate_markdown(Path) -> ModifiedAt = filelib:last_modified(source_path(Path)), GeneratedAt = filelib:last_modified(Path), case ModifiedAt > GeneratedAt of - true -> erlmarkdown:conv_file(source_path(Path), Path); + true -> markdown:conv_file(source_path(Path), Path); false -> ok end. diff --git a/examples/rest_basic_auth/Makefile b/examples/rest_basic_auth/Makefile index dc7bd5c..dab6aea 100644 --- a/examples/rest_basic_auth/Makefile +++ b/examples/rest_basic_auth/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/rest_hello_world/Makefile b/examples/rest_hello_world/Makefile index 3452a44..cb20651 100644 --- a/examples/rest_hello_world/Makefile +++ b/examples/rest_hello_world/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/rest_pastebin/Makefile b/examples/rest_pastebin/Makefile index 994f4a4..16d348f 100644 --- a/examples/rest_pastebin/Makefile +++ b/examples/rest_pastebin/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/ssl_hello_world/Makefile b/examples/ssl_hello_world/Makefile index 47676a9..be51964 100644 --- a/examples/ssl_hello_world/Makefile +++ b/examples/ssl_hello_world/Makefile @@ -6,4 +6,6 @@ DEPS = cowboy LOCAL_DEPS = ssl dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/ssl_hello_world/README.asciidoc b/examples/ssl_hello_world/README.asciidoc index 70ee7f8..feaa60c 100644 --- a/examples/ssl_hello_world/README.asciidoc +++ b/examples/ssl_hello_world/README.asciidoc @@ -9,8 +9,9 @@ $ make run Then point your browser to https://localhost:8443 -You will need to temporarily trust the root certificate authority, -which can also be found in `priv/ssl/cowboy-ca.crt`. +You will be greeted by a security message. You can ask for more +information and ultimately accept to access localhost. This is +due to the example using a self-signed certificate. Recent browsers will communicate using HTTP/2. Older browsers will use HTTP/1.1. @@ -19,7 +20,7 @@ will use HTTP/1.1. 
[source,bash] ---- -$ curl --cacert priv/ssl/cowboy-ca.crt -i https://localhost:8443 +$ curl -k -i https://localhost:8443 HTTP/1.1 200 OK connection: keep-alive server: Cowboy diff --git a/examples/ssl_hello_world/priv/ssl/cert.pem b/examples/ssl_hello_world/priv/ssl/cert.pem new file mode 100644 index 0000000..69ed65f --- /dev/null +++ b/examples/ssl_hello_world/priv/ssl/cert.pem @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDTzCCAjegAwIBAgIUD7jNyCgABo8GlnEojOSTFWZzkJswDQYJKoZIhvcNAQEL +BQAwNzELMAkGA1UEBhMCRlIxEzARBgNVBAgMClNvbWUtU3RhdGUxEzARBgNVBAoM +Ck5pbmUgTmluZXMwHhcNMjQwMTI2MTQyODExWhcNMzcxMDA0MTQyODExWjA3MQsw +CQYDVQQGEwJGUjETMBEGA1UECAwKU29tZS1TdGF0ZTETMBEGA1UECgwKTmluZSBO +aW5lczCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKfNEwF0v1Gm2e6a +M4hqI3JhmerZSNYWw8NiaUybR5hVUS9X4Chk+/y8kBLX2OYbGGlAxgbOZJa5D+kf +H1iakoUQaILinxPx3yxtIOePS3q/Xi5/EBVTdwLOoI26oSdzY2RTKKAPO1PCcAjq +6gDpw2u7q26sSU1kul6dD4Wle6+yNtnJdNKo9zLCLXr6TtuHdvbAU1oblLCKZ1Db +/uLkhGaUI/EUNeU1ZJrPmnoneYkTcG5mC5PMFVhqJ3bNYez5Hgr2Ra1Fz0dVgmRM +FpJ8NF6UQgA9dAs2Oh1uWbTjJiX0tO92RslXlhpLHS2VKZWsxiN2bniNXsNKzQ9M +ty0qnxkCAwEAAaNTMFEwHQYDVR0OBBYEFKuBPzB9rBCJNAnUyQMXjkVKIMJlMB8G +A1UdIwQYMBaAFKuBPzB9rBCJNAnUyQMXjkVKIMJlMA8GA1UdEwEB/wQFMAMBAf8w +DQYJKoZIhvcNAQELBQADggEBAHWXDKlY39csROTQ2Dm3CnTj14tj3cW4onsOYTKW +FSlVdMOk3+ionB4vZA/Ino8OjrjiZ2dB3Tvl2J+AxEea3ltDbdh6qVuqSwvQZCeV +8gWp05wzyTfIpQRD10ZwOU6dzR89T+o7oG/7D8Ydk3nzecthF1aU0YBW8OtuZFog +lC/PIIoVEyUiTEnFJrkQge1OmZWiAuImIed+cEmkw9ZAN2/9i/OxWZKAGoKrmfPq +kzdOoxxFRLnqHo2OYdA0IPpSuGK5ayjYrLgXW0Wa4FKzmDh7Gy+JSrvLuFur9PEi +D0Encva2uX1hAcFQDrzICTsD6ANuIbw0cmlrCJYH6E21PrM= +-----END CERTIFICATE----- diff --git a/examples/ssl_hello_world/priv/ssl/cowboy-ca.crt b/examples/ssl_hello_world/priv/ssl/cowboy-ca.crt deleted file mode 100644 index a35ac39..0000000 --- a/examples/ssl_hello_world/priv/ssl/cowboy-ca.crt +++ /dev/null @@ -1,16 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICeDCCAeGgAwIBAgIJAOvpU0y2e5J4MA0GCSqGSIb3DQEBBQUAMFUxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczETMBEGA1UECgwKTmluZSBOaW5lczEPMA0G -A1UECwwGQ293Ym95MRAwDgYDVQQDDAdST09UIENBMB4XDTEzMDIyODA1MTAwMVoX -DTMzMDIyMzA1MTAwMVowVTELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVRleGFzMRMw -EQYDVQQKDApOaW5lIE5pbmVzMQ8wDQYDVQQLDAZDb3dib3kxEDAOBgNVBAMMB1JP -T1QgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMzmY7Us06yjyUbpqwPx -Iv+xh/g3V7we07ClC9GEYnvr3OQvdA1jFEHccMBUUjRoQ8DPd6uSyK5UkixABs08 -Tt5B3VsnGKr0DIN+IO4SN2PkmBqIU/BN3KdcwN65YNr3iM0KsKWeFtAZdYx4CakX -7REbO0wjK20AH3xSBn3uFGiBAgMBAAGjUDBOMB0GA1UdDgQWBBRKfZ8KF2jlLBDm -NL6IuEuGY0pdbzAfBgNVHSMEGDAWgBRKfZ8KF2jlLBDmNL6IuEuGY0pdbzAMBgNV -HRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GBAG1I0kBxXiLkM1b7rl2zPLizREYg -1m+ajb6rWzPOBg6TXjv58Be+H4tqoHIL/M/crixew5emftBkuAGjiKMhbIokjvan -aPTCV8U6HHvNvz9c68HpESWbd+56cHqfsS5XCKp1OpW5tbL2UQYpFKMP4qmbv3Ea -pBfPPmSFMBb1i2AI ------END CERTIFICATE----- diff --git a/examples/ssl_hello_world/priv/ssl/key.pem b/examples/ssl_hello_world/priv/ssl/key.pem new file mode 100644 index 0000000..3f9fbe4 --- /dev/null +++ b/examples/ssl_hello_world/priv/ssl/key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCnzRMBdL9Rptnu +mjOIaiNyYZnq2UjWFsPDYmlMm0eYVVEvV+AoZPv8vJAS19jmGxhpQMYGzmSWuQ/p +Hx9YmpKFEGiC4p8T8d8sbSDnj0t6v14ufxAVU3cCzqCNuqEnc2NkUyigDztTwnAI +6uoA6cNru6turElNZLpenQ+FpXuvsjbZyXTSqPcywi16+k7bh3b2wFNaG5SwimdQ +2/7i5IRmlCPxFDXlNWSaz5p6J3mJE3BuZguTzBVYaid2zWHs+R4K9kWtRc9HVYJk +TBaSfDRelEIAPXQLNjodblm04yYl9LTvdkbJV5YaSx0tlSmVrMYjdm54jV7DSs0P +TLctKp8ZAgMBAAECggEAR5e6D6l5hUNcgS4+ZWnvhLo6utYI+vrMfFzNE3e+5LIm 
+CL6D74gicRMcn0WDj62ozSNrOfUuOpZrwOlb7OhKMkataIZ7G73bG6/V1aYwLIdg +jhL9UDQDt2lkXAPwBQ54rhHC6AOHqvVu6ocb3tbd32W7P2V3gvNChuKZAEr6Chwc +1JE5e1k7uZK4rjqZhd86pV2hks/jNknAZpEROTw80qpo3MzlMDMhXyKmyGa84t91 +1bijJ2DMPKsaxSYkWa06Zx3ymiX+qtKFRnSqZo2aEqpeTgQ0hRBSA429d7uCKO0o +kwqOyT85qMFRA+4jfkcAwUi4DELVCFlN/QNWCMH09wKBgQDVuw/sGnjVxCQ/s7pH +FuGA55S1qUtrcYsMHV5uZNtxLOqeAURomgiTpDVNNhLBuJwVjZrBv8Msl1/99EZ7 +8Hws+ERcjlbmyBiq6/VdRW6bJsrFnOS4qUbwWQp0Yztdeu6sTwIEI0KO/oFypf9G +L9mwjXwTvWEFg5etW1BPq+XmMwKBgQDI/KXNul1zCnrOY6sYrbPShYLZgPQRjNi5 +Ho6N5NxRc3xhyzExbjNtA/N/30d+/p7H8ND+TgpsYdjvEqqgpQQmCeg3/n6eSzb2 +hotCVBt8dU2TjD5v68DLzGv61s7PV81e4grkU5nCe+y7zJMwKGQ8BbmYTBBYEO0P +nTHwuwHhgwKBgQCx2B8OopRro/NZwm69Wq+3+HtIkh98vxUptoJuL6RdzzdG1N0c +gRej6t6jadw/sCLI2HSuxaddQnSQt6Oy29AoB0mzDooHLPdBumgH/Y9ksOnHd57m +fYzWz/CgGjY6ueFCJdgSo1ht7h6+zJvWxlhIzeIx9sJ1uSMMEFCKiwoY+wKBgGb+ +kTjLt/er9yKskJEk8nF/WX58RpZ3xteWgRbVoNFcjPDQX3UlM9U5oR52HP1HHbb4 +ASFQfKbtvW1F84o/BdE4YnfPQrN7d779U3+5+hvdQNPLmnNgLHxDVVJFodU++U8W +Jt66uKChQL88JnEXQcZAaMtSr01x3wmRVHY4Xs5hAoGBAMPfa+rcGukjbMF+MZ0P +ZV1Pq7AxVJ/C0XINnpZrsN+e6dO52Y2VXbnQkML7PKZXzSY88QwunBp88VoPlDux +llmLZc54zUFlsC1iHrEzt+hoxFG0tfL83vic5kSx6u5oZdxjZ2InqTzE8TmORU3v +6/ik7Q4VeDQ5uLnR4GiLW+qj +-----END PRIVATE KEY----- diff --git a/examples/ssl_hello_world/priv/ssl/server.crt b/examples/ssl_hello_world/priv/ssl/server.crt deleted file mode 100644 index 0bdfaed..0000000 --- a/examples/ssl_hello_world/priv/ssl/server.crt +++ /dev/null @@ -1,17 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICpTCCAg6gAwIBAgIJAOvpU0y2e5J5MA0GCSqGSIb3DQEBBQUAMFUxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczETMBEGA1UECgwKTmluZSBOaW5lczEPMA0G -A1UECwwGQ293Ym95MRAwDgYDVQQDDAdST09UIENBMB4XDTEzMDIyODA1MjMzNFoX -DTMzMDIyMzA1MjMzNFowVzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVRleGFzMRMw -EQYDVQQKDApOaW5lIE5pbmVzMQ8wDQYDVQQLDAZDb3dib3kxEjAQBgNVBAMMCWxv -Y2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAzbW1GjECzHUc/WST -qLiAGqjCNccR5saVS+yoz2SPRhpoyf0/qBrX5BY0tzmgozoTiRfE4wCiVD99Cc+D -rp/FM49r4EpZdocIovprmOmv/gwkoj95zaA6PKNn1OdmDp2hwJsX2Zm3kpbGUZTx -jDkkccmgUb4EjL7qNHq7saQtivUCAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgB -hvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYE -FB6jTEIWI8T1ckORA4GezbyYxtbvMB8GA1UdIwQYMBaAFEp9nwoXaOUsEOY0voi4 -S4ZjSl1vMA0GCSqGSIb3DQEBBQUAA4GBACMboVQjrx8u/fk3gl/sR0tbA0Wf/NcS -2Dzsy2czndgVUAG4Sqb+hfgn0dqAyUKghRrj3JDcYxYksGPIklDfPzZb7yJ39l16 -6x5ZiIzhp8CAVdPvRxRznw5rZwaXesryXu1jVSZxTr3MYZdkG6KaAM0t90+YlGLZ -UG8fAicx0Bf+ ------END CERTIFICATE----- diff --git a/examples/ssl_hello_world/priv/ssl/server.key b/examples/ssl_hello_world/priv/ssl/server.key deleted file mode 100644 index b6f7374..0000000 --- a/examples/ssl_hello_world/priv/ssl/server.key +++ /dev/null @@ -1,15 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIICXQIBAAKBgQDNtbUaMQLMdRz9ZJOouIAaqMI1xxHmxpVL7KjPZI9GGmjJ/T+o -GtfkFjS3OaCjOhOJF8TjAKJUP30Jz4Oun8Uzj2vgSll2hwii+muY6a/+DCSiP3nN -oDo8o2fU52YOnaHAmxfZmbeSlsZRlPGMOSRxyaBRvgSMvuo0eruxpC2K9QIDAQAB -AoGAaD85c/h6bpq7Aj7CBbLaWKhFI3OqwsTITB22vsM7SE+B4zsP02UnG1OVi3UM -zytTUxpUkKV1njQ+bYZYOVqGWF4Up8tTqUglHn0FTPok1AIemELWtz3sXvdSHC1T -lqvFBAZ9kibn13qGyVOiyCFaMwfOM/05RvV7p3jfUMTWnNECQQDs7yCJZ8Ol8MyH -TGZzvkjoN2zg1KwmTbSD1hkP6QAJtPdRuqFbjlEru0/PefgOXsWLRIa3/3v0qw2G -xGkV6AXTAkEA3kNbFisqUydjPnZIYv/P6SvPdUimHJEjXbAbfNfzS9dzszrOVJd2 -XqGH7z5yzjoH3IyaIMW8GnubVzGDSjrHFwJAKSU5vELlygpwKkrNO+pelN0TLlQg -dSJnZ8GlZorq88SWcn37iX/EftivenNO7YftvEqxLoDSkOGnnrC7Iw/A+wJBAIEe -L/QY72WPJCBNJpAce/PA96vyoE1II3txqwZDjZspdpVQPDz4IFOpEwbxCFC1dYuy -Qnd3Z2cbF4r3wIWGz9ECQQCJGNhUNtY+Om1ELdqPcquxE2VRV/pucnvJSTKwyo2C 
-Rvm6H7kFDwPDuN23YnTOlTiho0zzCkclcIukhIVJ+dKz ------END RSA PRIVATE KEY----- diff --git a/examples/ssl_hello_world/src/ssl_hello_world_app.erl b/examples/ssl_hello_world/src/ssl_hello_world_app.erl index 959dc77..542e4d8 100644 --- a/examples/ssl_hello_world/src/ssl_hello_world_app.erl +++ b/examples/ssl_hello_world/src/ssl_hello_world_app.erl @@ -19,9 +19,8 @@ start(_Type, _Args) -> PrivDir = code:priv_dir(ssl_hello_world), {ok, _} = cowboy:start_tls(https, [ {port, 8443}, - {cacertfile, PrivDir ++ "/ssl/cowboy-ca.crt"}, - {certfile, PrivDir ++ "/ssl/server.crt"}, - {keyfile, PrivDir ++ "/ssl/server.key"} + {certfile, PrivDir ++ "/ssl/cert.pem"}, + {keyfile, PrivDir ++ "/ssl/key.pem"} ], #{env => #{dispatch => Dispatch}}), ssl_hello_world_sup:start_link(). diff --git a/examples/upload/Makefile b/examples/upload/Makefile index af39f92..939d148 100644 --- a/examples/upload/Makefile +++ b/examples/upload/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/examples/websocket/Makefile b/examples/websocket/Makefile index 955cd52..b8cada8 100644 --- a/examples/websocket/Makefile +++ b/examples/websocket/Makefile @@ -5,4 +5,6 @@ PROJECT_VERSION = 1 DEPS = cowboy dep_cowboy_commit = master +REL_DEPS = relx + include ../../erlang.mk diff --git a/rebar.config b/rebar.config index cb76748..146f88f 100644 --- a/rebar.config +++ b/rebar.config @@ -1,4 +1,4 @@ {deps, [ -{cowlib,".*",{git,"https://github.com/ninenines/cowlib","master"}},{ranch,".*",{git,"https://github.com/ninenines/ranch","1.7.1"}} +{cowlib,".*",{git,"https://github.com/ninenines/cowlib",{branch,"master"}}},{ranch,".*",{git,"https://github.com/ninenines/ranch",{tag,"1.8.1"}}} ]}. {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard,warn_missing_spec,warn_untyped_record]}. diff --git a/src/cowboy.erl b/src/cowboy.erl index c4be25b..6a5634e 100644 --- a/src/cowboy.erl +++ b/src/cowboy.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -16,13 +16,19 @@ -export([start_clear/3]). -export([start_tls/3]). +-export([start_quic/3]). -export([stop_listener/1]). +-export([get_env/2]). +-export([get_env/3]). -export([set_env/3]). %% Internal. -export([log/2]). -export([log/4]). +%% Don't warn about the bad quicer specs. +-dialyzer([{nowarn_function, start_quic/3}]). + -type opts() :: cowboy_http:opts() | cowboy_http2:opts(). -export_type([opts/0]). @@ -42,35 +48,156 @@ -spec start_clear(ranch:ref(), ranch:opts(), opts()) -> {ok, pid()} | {error, any()}. + start_clear(Ref, TransOpts0, ProtoOpts0) -> TransOpts1 = ranch:normalize_opts(TransOpts0), - {TransOpts, ConnectionType} = ensure_connection_type(TransOpts1), - ProtoOpts = ProtoOpts0#{connection_type => ConnectionType}, + {TransOpts2, DynamicBuffer} = ensure_dynamic_buffer(TransOpts1, ProtoOpts0), + {TransOpts, ConnectionType} = ensure_connection_type(TransOpts2), + ProtoOpts = ProtoOpts0#{ + connection_type => ConnectionType, + dynamic_buffer => DynamicBuffer + }, ranch:start_listener(Ref, ranch_tcp, TransOpts, cowboy_clear, ProtoOpts). -spec start_tls(ranch:ref(), ranch:opts(), opts()) -> {ok, pid()} | {error, any()}. 
+ start_tls(Ref, TransOpts0, ProtoOpts0) -> TransOpts1 = ranch:normalize_opts(TransOpts0), - SocketOpts = maps:get(socket_opts, TransOpts1, []), - TransOpts2 = TransOpts1#{socket_opts => [ - {next_protocols_advertised, [<<"h2">>, <<"http/1.1">>]}, - {alpn_preferred_protocols, [<<"h2">>, <<"http/1.1">>]} - |SocketOpts]}, - {TransOpts, ConnectionType} = ensure_connection_type(TransOpts2), - ProtoOpts = ProtoOpts0#{connection_type => ConnectionType}, + {TransOpts2, DynamicBuffer} = ensure_dynamic_buffer(TransOpts1, ProtoOpts0), + TransOpts3 = ensure_alpn(TransOpts2), + {TransOpts, ConnectionType} = ensure_connection_type(TransOpts3), + ProtoOpts = ProtoOpts0#{ + connection_type => ConnectionType, + dynamic_buffer => DynamicBuffer + }, ranch:start_listener(Ref, ranch_ssl, TransOpts, cowboy_tls, ProtoOpts). +%% @todo Experimental function to start a barebone QUIC listener. +%% This will need to be reworked to be closer to Ranch +%% listeners and provide equivalent features. +%% +%% @todo Better type for transport options. Might require fixing quicer types. + +-spec start_quic(ranch:ref(), #{socket_opts => [{atom(), _}]}, cowboy_http3:opts()) + -> {ok, pid()}. + +%% @todo Implement dynamic_buffer for HTTP/3 if/when it applies. +start_quic(Ref, TransOpts, ProtoOpts) -> + {ok, _} = application:ensure_all_started(quicer), + Parent = self(), + SocketOpts0 = maps:get(socket_opts, TransOpts, []), + {Port, SocketOpts2} = case lists:keytake(port, 1, SocketOpts0) of + {value, {port, Port0}, SocketOpts1} -> + {Port0, SocketOpts1}; + false -> + {port_0(), SocketOpts0} + end, + SocketOpts = [ + {alpn, ["h3"]}, %% @todo Why not binary? + %% We only need 3 for control and QPACK enc/dec, + %% but we need more for WebTransport. %% @todo Use 3 if WT is disabled. + {peer_unidi_stream_count, 100}, + {peer_bidi_stream_count, 100}, + %% For WebTransport. + %% @todo We probably don't want it enabled if WT isn't used. + {datagram_send_enabled, 1}, + {datagram_receive_enabled, 1} + |SocketOpts2], + _ListenerPid = spawn(fun() -> + {ok, Listener} = quicer:listen(Port, SocketOpts), + Parent ! {ok, Listener}, + _AcceptorPid = [spawn(fun AcceptLoop() -> + {ok, Conn} = quicer:accept(Listener, []), + Pid = spawn(fun() -> + receive go -> ok end, + %% We have to do the handshake after handing control of + %% the connection otherwise streams may come in before + %% the controlling process is changed and messages will + %% not be sent to the correct process. + {ok, Conn} = quicer:handshake(Conn), + process_flag(trap_exit, true), %% @todo Only if supervisor though. + try cowboy_http3:init(Parent, Ref, Conn, ProtoOpts) + catch + exit:{shutdown,_} -> ok; + C:E:S -> log(error, "CRASH ~p:~p:~p", [C,E,S], ProtoOpts) + end + end), + ok = quicer:controlling_process(Conn, Pid), + Pid ! go, + AcceptLoop() + end) || _ <- lists:seq(1, 20)], + %% Listener process must not terminate. + receive after infinity -> ok end + end), + receive + {ok, Listener} -> + {ok, Listener} + end. + +%% Select a random UDP port using gen_udp because quicer +%% does not provide equivalent functionality. Taken from +%% quicer test suites. +port_0() -> + {ok, Socket} = gen_udp:open(0, [{reuseaddr, true}]), + {ok, {_, Port}} = inet:sockname(Socket), + gen_udp:close(Socket), + case os:type() of + {unix, darwin} -> + %% Apparently macOS doesn't free the port immediately. + timer:sleep(500); + _ -> + ok + end, + Port. 
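
%% Editor's sketch (not part of the patch): how the experimental QUIC
%% listener above might be started. The listener name, port, paths and
%% my_handler module are illustrative; the certfile/keyfile option names
%% are assumptions about what quicer:listen/2 accepts, and the env/dispatch
%% protocol options follow the usual Cowboy conventions for cowboy_http3.

start_h3_sketch() ->
    Dispatch = cowboy_router:compile([{'_', [{"/", my_handler, []}]}]),
    {ok, _Listener} = cowboy:start_quic(example_h3, #{
        socket_opts => [
            {port, 4433},
            {certfile, "priv/cert.pem"}, %% assumed quicer option name
            {keyfile, "priv/key.pem"}    %% assumed quicer option name
        ]
    }, #{env => #{dispatch => Dispatch}}).
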
+ +ensure_alpn(TransOpts) -> + SocketOpts = maps:get(socket_opts, TransOpts, []), + TransOpts#{socket_opts => [ + {alpn_preferred_protocols, [<<"h2">>, <<"http/1.1">>]} + |SocketOpts]}. + ensure_connection_type(TransOpts=#{connection_type := ConnectionType}) -> {TransOpts, ConnectionType}; ensure_connection_type(TransOpts) -> {TransOpts#{connection_type => supervisor}, supervisor}. +%% Dynamic buffer was set; accept transport options as-is. +%% Note that initial 'buffer' size may be lower than dynamic buffer allows. +ensure_dynamic_buffer(TransOpts, #{dynamic_buffer := DynamicBuffer}) -> + {TransOpts, DynamicBuffer}; +%% Dynamic buffer was not set; define default dynamic buffer +%% only if 'buffer' size was not configured. In that case we +%% set the 'buffer' size to the lowest value. +ensure_dynamic_buffer(TransOpts=#{socket_opts := SocketOpts}, _) -> + case proplists:get_value(buffer, SocketOpts, undefined) of + undefined -> + {TransOpts#{socket_opts => [{buffer, 1024}|SocketOpts]}, {1024, 131072}}; + _ -> + {TransOpts, false} + end. + -spec stop_listener(ranch:ref()) -> ok | {error, not_found}. + stop_listener(Ref) -> ranch:stop_listener(Ref). +-spec get_env(ranch:ref(), atom()) -> ok. + +get_env(Ref, Name) -> + Opts = ranch:get_protocol_options(Ref), + Env = maps:get(env, Opts, #{}), + maps:get(Name, Env). + +-spec get_env(ranch:ref(), atom(), any()) -> ok. + +get_env(Ref, Name, Default) -> + Opts = ranch:get_protocol_options(Ref), + Env = maps:get(env, Opts, #{}), + maps:get(Name, Env, Default). + -spec set_env(ranch:ref(), atom(), any()) -> ok. + set_env(Ref, Name, Value) -> Opts = ranch:get_protocol_options(Ref), Env = maps:get(env, Opts, #{}), @@ -80,10 +207,12 @@ set_env(Ref, Name, Value) -> %% Internal. -spec log({log, logger:level(), io:format(), list()}, opts()) -> ok. + log({log, Level, Format, Args}, Opts) -> log(Level, Format, Args, Opts). -spec log(logger:level(), io:format(), list(), opts()) -> ok. 
+ log(Level, Format, Args, #{logger := Logger}) when Logger =/= error_logger -> _ = Logger:Level(Format, Args), diff --git a/src/cowboy_app.erl b/src/cowboy_app.erl index 74cba41..e58e1f6 100644 --- a/src/cowboy_app.erl +++ b/src/cowboy_app.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_bstr.erl b/src/cowboy_bstr.erl index d8041e4..d0e7301 100644 --- a/src/cowboy_bstr.erl +++ b/src/cowboy_bstr.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_children.erl b/src/cowboy_children.erl index 05d39fb..2e00c37 100644 --- a/src/cowboy_children.erl +++ b/src/cowboy_children.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_clear.erl b/src/cowboy_clear.erl index 4f3a234..845fdc1 100644 --- a/src/cowboy_clear.erl +++ b/src/cowboy_clear.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2016-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -33,19 +33,9 @@ start_link(Ref, Transport, Opts) -> -spec connection_process(pid(), ranch:ref(), module(), cowboy:opts()) -> ok. connection_process(Parent, Ref, Transport, Opts) -> - ProxyInfo = case maps:get(proxy_header, Opts, false) of - true -> - {ok, ProxyInfo0} = ranch:recv_proxy_header(Ref, 1000), - ProxyInfo0; - false -> - undefined - end, + ProxyInfo = get_proxy_info(Ref, Opts), {ok, Socket} = ranch:handshake(Ref), %% Use cowboy_http2 directly only when 'http' is missing. - %% Otherwise switch to cowboy_http2 from cowboy_http. - %% - %% @todo Extend this option to cowboy_tls and allow disabling - %% the switch to cowboy_http2 in cowboy_http. Also document it. Protocol = case maps:get(protocols, Opts, [http2, http]) of [http2] -> cowboy_http2; [_|_] -> cowboy_http @@ -58,3 +48,11 @@ init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, Protocol) -> supervisor -> process_flag(trap_exit, true) end, Protocol:init(Parent, Ref, Socket, Transport, ProxyInfo, Opts). + +get_proxy_info(Ref, #{proxy_header := true}) -> + case ranch:recv_proxy_header(Ref, 1000) of + {ok, ProxyInfo} -> ProxyInfo; + {error, closed} -> exit({shutdown, closed}) + end; +get_proxy_info(_, _) -> + undefined. diff --git a/src/cowboy_clock.erl b/src/cowboy_clock.erl index 28f8a1b..b6e39f4 100644 --- a/src/cowboy_clock.erl +++ b/src/cowboy_clock.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -93,7 +93,7 @@ handle_cast(_Msg, State) -> -spec handle_info(any(), State) -> {noreply, State} when State::#state{}. 
handle_info(update, #state{universaltime=Prev, rfc1123=B1, tref=TRef0}) -> %% Cancel the timer in case an external process sent an update message. - _ = erlang:cancel_timer(TRef0), + _ = erlang:cancel_timer(TRef0, [{async, true}, {info, false}]), T = erlang:universaltime(), B2 = update_rfc1123(B1, Prev, T), ets:insert(?MODULE, {rfc1123, B2}), diff --git a/src/cowboy_compress_h.erl b/src/cowboy_compress_h.erl index 374cb6a..785eb0d 100644 --- a/src/cowboy_compress_h.erl +++ b/src/cowboy_compress_h.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -96,11 +96,14 @@ check_req(Req) -> %% Do not compress responses that contain the content-encoding header. check_resp_headers(#{<<"content-encoding">> := _}, State) -> State#state{compress=undefined}; +%% Do not compress responses that contain the etag header. +check_resp_headers(#{<<"etag">> := _}, State) -> + State#state{compress=undefined}; check_resp_headers(_, State) -> State. fold(Commands, State=#state{compress=undefined}) -> - {Commands, State}; + fold_vary_only(Commands, State, []); fold(Commands, State) -> fold(Commands, State, []). @@ -108,32 +111,32 @@ fold([], State, Acc) -> {lists:reverse(Acc), State}; %% We do not compress full sendfile bodies. fold([Response={response, _, _, {sendfile, _, _, _}}|Tail], State, Acc) -> - fold(Tail, State, [Response|Acc]); + fold(Tail, State, [vary_response(Response)|Acc]); %% We compress full responses directly, unless they are lower than %% the configured threshold or we find we are not able to by looking at the headers. fold([Response0={response, _, Headers, Body}|Tail], State0=#state{threshold=CompressThreshold}, Acc) -> case check_resp_headers(Headers, State0) of State=#state{compress=undefined} -> - fold(Tail, State, [Response0|Acc]); + fold(Tail, State, [vary_response(Response0)|Acc]); State1 -> BodyLength = iolist_size(Body), if BodyLength =< CompressThreshold -> - fold(Tail, State1, [Response0|Acc]); + fold(Tail, State1, [vary_response(Response0)|Acc]); true -> {Response, State} = gzip_response(Response0, State1), - fold(Tail, State, [Response|Acc]) + fold(Tail, State, [vary_response(Response)|Acc]) end end; %% Check headers and initiate compression... fold([Response0={headers, _, Headers}|Tail], State0, Acc) -> case check_resp_headers(Headers, State0) of State=#state{compress=undefined} -> - fold(Tail, State, [Response0|Acc]); + fold(Tail, State, [vary_headers(Response0)|Acc]); State1 -> {Response, State} = gzip_headers(Response0, State1), - fold(Tail, State, [Response|Acc]) + fold(Tail, State, [vary_headers(Response)|Acc]) end; %% then compress each data commands individually. fold([Data0={data, _, _}|Tail], State0=#state{compress=gzip}, Acc) -> @@ -161,6 +164,15 @@ fold([SetOptions={set_options, Opts}|Tail], State=#state{ fold([Command|Tail], State, Acc) -> fold(Tail, State, [Command|Acc]). +fold_vary_only([], State, Acc) -> + {lists:reverse(Acc), State}; +fold_vary_only([Response={response, _, _, _}|Tail], State, Acc) -> + fold_vary_only(Tail, State, [vary_response(Response)|Acc]); +fold_vary_only([Response={headers, _, _}|Tail], State, Acc) -> + fold_vary_only(Tail, State, [vary_headers(Response)|Acc]); +fold_vary_only([Command|Tail], State, Acc) -> + fold_vary_only(Tail, State, [Command|Acc]). 
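
%% Editor's note (not part of the patch): with the fold changes above,
%% responses that skip compression (etag present, already encoded, or
%% below the threshold) still have their vary header updated via
%% vary_response/vary_headers, so caches keep compressed and identity
%% representations apart. Compression can also be tuned per request from
%% a handler with a set_options stream command; the option below is the
%% one documented for cowboy_compress_h, the Req variable is illustrative.

disable_buffering_sketch(Req) ->
    ok = cowboy_req:cast({set_options, #{compress_buffering => false}}, Req),
    Req.
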
+ buffering_to_zflush(true) -> none; buffering_to_zflush(false) -> sync. @@ -180,10 +192,10 @@ gzip_response({response, Status, Headers, Body}, State) -> after zlib:close(Z) end, - {{response, Status, vary(Headers#{ + {{response, Status, Headers#{ <<"content-length">> => integer_to_binary(iolist_size(GzBody)), <<"content-encoding">> => <<"gzip">> - }), GzBody}, State}. + }, GzBody}, State}. gzip_headers({headers, Status, Headers0}, State) -> Z = zlib:open(), @@ -191,9 +203,15 @@ gzip_headers({headers, Status, Headers0}, State) -> %% @todo It might be good to allow them to be configured? zlib:deflateInit(Z, default, deflated, 31, 8, default), Headers = maps:remove(<<"content-length">>, Headers0), - {{headers, Status, vary(Headers#{ + {{headers, Status, Headers#{ <<"content-encoding">> => <<"gzip">> - })}, State#state{deflate=Z}}. + }}, State#state{deflate=Z}}. + +vary_response({response, Status, Headers, Body}) -> + {response, Status, vary(Headers), Body}. + +vary_headers({headers, Status, Headers}) -> + {headers, Status, vary(Headers)}. %% We must add content-encoding to vary if it's not already there. vary(Headers=#{<<"vary">> := Vary}) -> diff --git a/src/cowboy_constraints.erl b/src/cowboy_constraints.erl index 6509c4b..84ff249 100644 --- a/src/cowboy_constraints.erl +++ b/src/cowboy_constraints.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2014-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_decompress_h.erl b/src/cowboy_decompress_h.erl new file mode 100644 index 0000000..84283e5 --- /dev/null +++ b/src/cowboy_decompress_h.erl @@ -0,0 +1,240 @@ +%% Copyright (c) jdamanalo <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_decompress_h). +-behavior(cowboy_stream). + +-export([init/3]). +-export([data/4]). +-export([info/3]). +-export([terminate/3]). +-export([early_error/5]). + +-record(state, { + next :: any(), + enabled = true :: boolean(), + ratio_limit :: non_neg_integer() | undefined, + compress = undefined :: undefined | gzip, + inflate = undefined :: undefined | zlib:zstream(), + is_reading = false :: boolean(), + + %% We use a list of binaries to avoid doing unnecessary + %% memory allocations when inflating. We convert to binary + %% when we propagate the data. The data must be reversed + %% before converting to binary or inflating: this is done + %% via the buffer_to_binary/buffer_to_iovec functions. + read_body_buffer = [] :: [binary()], + read_body_is_fin = nofin :: nofin | {fin, non_neg_integer()} +}). 
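
%% Editor's sketch (not part of the patch): enabling this new stream
%% handler on a listener. The option names and defaults match init/3
%% below (decompress_enabled defaults to true, decompress_ratio_limit
%% to 20); the listener name, port, dispatch rules and my_handler module
%% are illustrative.

decompress_listener_sketch() ->
    Dispatch = cowboy_router:compile([{'_', [{"/upload", my_handler, []}]}]),
    {ok, _} = cowboy:start_clear(example_decompress, [{port, 8080}], #{
        env => #{dispatch => Dispatch},
        stream_handlers => [cowboy_decompress_h, cowboy_stream_h],
        decompress_enabled => true,
        decompress_ratio_limit => 20
    }).
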
+ +-spec init(cowboy_stream:streamid(), cowboy_req:req(), cowboy:opts()) + -> {cowboy_stream:commands(), #state{}}. +init(StreamID, Req0, Opts) -> + Enabled = maps:get(decompress_enabled, Opts, true), + RatioLimit = maps:get(decompress_ratio_limit, Opts, 20), + {Req, State} = check_and_update_req(Req0), + Inflate = case State#state.compress of + undefined -> + undefined; + gzip -> + Z = zlib:open(), + zlib:inflateInit(Z, 31), + Z + end, + {Commands, Next} = cowboy_stream:init(StreamID, Req, Opts), + fold(Commands, State#state{next=Next, enabled=Enabled, + ratio_limit=RatioLimit, inflate=Inflate}). + +-spec data(cowboy_stream:streamid(), cowboy_stream:fin(), cowboy_req:resp_body(), State) + -> {cowboy_stream:commands(), State} when State::#state{}. +data(StreamID, IsFin, Data, State=#state{next=Next0, inflate=undefined}) -> + {Commands, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0), + fold(Commands, State#state{next=Next, read_body_is_fin=IsFin}); +data(StreamID, IsFin, Data, State=#state{next=Next0, enabled=false, read_body_buffer=Buffer}) -> + {Commands, Next} = cowboy_stream:data(StreamID, IsFin, + buffer_to_binary([Data|Buffer]), Next0), + fold(Commands, State#state{next=Next, read_body_is_fin=IsFin}); +data(StreamID, IsFin, Data0, State0=#state{next=Next0, ratio_limit=RatioLimit, + inflate=Z, is_reading=true, read_body_buffer=Buffer}) -> + Data = buffer_to_iovec([Data0|Buffer]), + Limit = iolist_size(Data) * RatioLimit, + case cow_deflate:inflate(Z, Data, Limit) of + {error, ErrorType} -> + zlib:close(Z), + Status = case ErrorType of + data_error -> 400; + size_error -> 413 + end, + Commands = [ + {error_response, Status, #{<<"content-length">> => <<"0">>}, <<>>}, + stop + ], + fold(Commands, State0#state{inflate=undefined, read_body_buffer=[]}); + {ok, Inflated} -> + State = case IsFin of + nofin -> + State0; + fin -> + zlib:close(Z), + State0#state{inflate=undefined} + end, + {Commands, Next} = cowboy_stream:data(StreamID, IsFin, Inflated, Next0), + fold(Commands, State#state{next=Next, read_body_buffer=[], + read_body_is_fin=IsFin}) + end; +data(_, IsFin, Data, State=#state{read_body_buffer=Buffer}) -> + {[], State#state{read_body_buffer=[Data|Buffer], read_body_is_fin=IsFin}}. + +-spec info(cowboy_stream:streamid(), any(), State) + -> {cowboy_stream:commands(), State} when State::#state{}. +info(StreamID, Info, State=#state{next=Next0, inflate=undefined}) -> + {Commands, Next} = cowboy_stream:info(StreamID, Info, Next0), + fold(Commands, State#state{next=Next}); +info(StreamID, Info={CommandTag, _, _, _, _}, State=#state{next=Next0, read_body_is_fin=IsFin}) + when CommandTag =:= read_body; CommandTag =:= read_body_timeout -> + {Commands0, Next1} = cowboy_stream:info(StreamID, Info, Next0), + {Commands, Next} = data(StreamID, IsFin, <<>>, State#state{next=Next1, is_reading=true}), + fold(Commands ++ Commands0, Next); +info(StreamID, Info={set_options, Opts}, State0=#state{next=Next0, + enabled=Enabled0, ratio_limit=RatioLimit0, is_reading=IsReading}) -> + Enabled = maps:get(decompress_enabled, Opts, Enabled0), + RatioLimit = maps:get(decompress_ratio_limit, Opts, RatioLimit0), + {Commands, Next} = cowboy_stream:info(StreamID, Info, Next0), + %% We can't change the enabled setting after we start reading, + %% otherwise the data becomes garbage. Changing the setting + %% is not treated as an error, it is just ignored. 
+ State = case IsReading of + true -> State0; + false -> State0#state{enabled=Enabled} + end, + fold(Commands, State#state{next=Next, ratio_limit=RatioLimit}); +info(StreamID, Info, State=#state{next=Next0}) -> + {Commands, Next} = cowboy_stream:info(StreamID, Info, Next0), + fold(Commands, State#state{next=Next}). + +-spec terminate(cowboy_stream:streamid(), cowboy_stream:reason(), #state{}) -> any(). +terminate(StreamID, Reason, #state{next=Next, inflate=Z}) -> + case Z of + undefined -> ok; + _ -> zlib:close(Z) + end, + cowboy_stream:terminate(StreamID, Reason, Next). + +-spec early_error(cowboy_stream:streamid(), cowboy_stream:reason(), + cowboy_stream:partial_req(), Resp, cowboy:opts()) -> Resp + when Resp::cowboy_stream:resp_command(). +early_error(StreamID, Reason, PartialReq, Resp, Opts) -> + cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts). + +%% Internal. + +%% Check whether the request needs content decoding, and if it does +%% whether it fits our criteria for decoding. We also update the +%% Req to indicate whether content was decoded. +%% +%% We always set the content_decoded value in the Req because it +%% indicates whether content decoding was attempted. +%% +%% A malformed content-encoding header results in no decoding. +check_and_update_req(Req=#{headers := Headers}) -> + ContentDecoded = maps:get(content_decoded, Req, []), + try cowboy_req:parse_header(<<"content-encoding">>, Req) of + %% We only automatically decompress when gzip is the only + %% encoding used. Since it's the only encoding used, we + %% can remove the header entirely before passing the Req + %% forward. + [<<"gzip">>] -> + {Req#{ + headers => maps:remove(<<"content-encoding">>, Headers), + content_decoded => [<<"gzip">>|ContentDecoded] + }, #state{compress=gzip}}; + _ -> + {Req#{content_decoded => ContentDecoded}, + #state{compress=undefined}} + catch _:_ -> + {Req#{content_decoded => ContentDecoded}, + #state{compress=undefined}} + end. + +buffer_to_iovec(Buffer) -> + lists:reverse(Buffer). + +buffer_to_binary(Buffer) -> + iolist_to_binary(lists:reverse(Buffer)). + +fold(Commands, State) -> + fold(Commands, State, []). + +fold([], State, Acc) -> + {lists:reverse(Acc), State}; +fold([{response, Status, Headers0, Body}|Tail], State=#state{enabled=true}, Acc) -> + Headers = add_accept_encoding(Headers0), + fold(Tail, State, [{response, Status, Headers, Body}|Acc]); +fold([{headers, Status, Headers0} | Tail], State=#state{enabled=true}, Acc) -> + Headers = add_accept_encoding(Headers0), + fold(Tail, State, [{headers, Status, Headers}|Acc]); +fold([Command|Tail], State, Acc) -> + fold(Tail, State, [Command|Acc]). + +add_accept_encoding(Headers=#{<<"accept-encoding">> := AcceptEncoding}) -> + try cow_http_hd:parse_accept_encoding(iolist_to_binary(AcceptEncoding)) of + List -> + case lists:keyfind(<<"gzip">>, 1, List) of + %% gzip is excluded but this handler is enabled; we replace. + {_, 0} -> + Replaced = lists:keyreplace(<<"gzip">>, 1, List, {<<"gzip">>, 1000}), + Codings = build_accept_encoding(Replaced), + Headers#{<<"accept-encoding">> => Codings}; + {_, _} -> + Headers; + false -> + case lists:keyfind(<<"*">>, 1, List) of + %% Others are excluded along with gzip; we add. + {_, 0} -> + WithGzip = [{<<"gzip">>, 1000} | List], + Codings = build_accept_encoding(WithGzip), + Headers#{<<"accept-encoding">> => Codings}; + {_, _} -> + Headers; + false -> + Headers#{<<"accept-encoding">> => [AcceptEncoding, <<", gzip">>]} + end + end + catch _:_ -> + %% The accept-encoding header is invalid. 
Probably empty. We replace it with ours. + Headers#{<<"accept-encoding">> => <<"gzip">>} + end; +add_accept_encoding(Headers) -> + Headers#{<<"accept-encoding">> => <<"gzip">>}. + +%% @todo From cowlib, maybe expose? +qvalue_to_iodata(0) -> <<"0">>; +qvalue_to_iodata(Q) when Q < 10 -> [<<"0.00">>, integer_to_binary(Q)]; +qvalue_to_iodata(Q) when Q < 100 -> [<<"0.0">>, integer_to_binary(Q)]; +qvalue_to_iodata(Q) when Q < 1000 -> [<<"0.">>, integer_to_binary(Q)]; +qvalue_to_iodata(1000) -> <<"1">>. + +%% @todo Should be added to Cowlib. +build_accept_encoding([{ContentCoding, Q}|Tail]) -> + Weight = iolist_to_binary(qvalue_to_iodata(Q)), + Acc = <<ContentCoding/binary, ";q=", Weight/binary>>, + do_build_accept_encoding(Tail, Acc). + +do_build_accept_encoding([{ContentCoding, Q}|Tail], Acc0) -> + Weight = iolist_to_binary(qvalue_to_iodata(Q)), + Acc = <<Acc0/binary, ", ", ContentCoding/binary, ";q=", Weight/binary>>, + do_build_accept_encoding(Tail, Acc); +do_build_accept_encoding([], Acc) -> + Acc. diff --git a/src/cowboy_dynamic_buffer.hrl b/src/cowboy_dynamic_buffer.hrl new file mode 100644 index 0000000..4d05e50 --- /dev/null +++ b/src/cowboy_dynamic_buffer.hrl @@ -0,0 +1,80 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% These functions are common to cowboy_http, cowboy_http2 and +%% cowboy_websocket. It requires the options and the state +%% to use the same field names. + +%% Experiments have shown that the size of the 'buffer' can greatly +%% impact performance: a buffer too small leads to more messages +%% being handled and typically more binary appends; and a buffer +%% too large results in inefficient use of memory which in turn +%% reduces the throughput, presumably because large binary appends +%% are not as efficient as smaller ones, and because while the +%% buffer gets allocated only when there is data, the allocated +%% size remains until the binary is GC and so under-use hurts. +%% +%% The performance of a given 'buffer' size will also depend on +%% how the client is sending data, and on the protocol. For example, +%% HTTP/1.1 doesn't need a very large 'buffer' size for reading +%% request headers, but it does need one for reading large request +%% bodies. At the same time, HTTP/2 performs best reading large +%% request bodies when the 'buffer' size is about half that of +%% HTTP/1.1. +%% +%% It therefore becomes important to resize the buffer dynamically +%% depending on what is currently going on. We do this based on +%% the size of data packets we received from the transport. We +%% maintain a moving average and when that moving average is +%% 90% of the current 'buffer' size, we double the 'buffer' size. +%% When things slow down and the moving average falls below +%% 40% of the current 'buffer' size, we halve the 'buffer' size. 
+%% +%% To calculate the moving average we do (MovAvg + DataLen) div 2. +%% This means that the moving average will change very quickly when +%% DataLen increases or decreases rapidly. That's OK, we want to +%% be reactive, but also setting the buffer size is a pretty fast +%% operation. The formula could be changed to the following if it +%% became a problem: (MovAvg * N + DataLen) div (N + 1). +%% +%% Note that this works best when active,N uses low values of N. +%% We don't want to accumulate too much data because we resize +%% the buffer. + +init_dynamic_buffer_size(#{dynamic_buffer_initial_size := DynamicBuffer}) -> + DynamicBuffer; +init_dynamic_buffer_size(#{dynamic_buffer := {LowDynamicBuffer, _}}) -> + LowDynamicBuffer; +init_dynamic_buffer_size(_) -> + false. + +maybe_resize_buffer(State=#state{dynamic_buffer_size=false}, _) -> + State; +maybe_resize_buffer(State=#state{transport=Transport, socket=Socket, + opts=#{dynamic_buffer := {LowDynamicBuffer, HighDynamicBuffer}}, + dynamic_buffer_size=BufferSize0, dynamic_buffer_moving_average=MovingAvg0}, Data) -> + DataLen = byte_size(Data), + MovingAvg = (MovingAvg0 + DataLen) div 2, + if + BufferSize0 < HighDynamicBuffer andalso MovingAvg > BufferSize0 * 0.9 -> + BufferSize = min(BufferSize0 * 2, HighDynamicBuffer), + ok = maybe_socket_error(State, Transport:setopts(Socket, [{buffer, BufferSize}])), + State#state{dynamic_buffer_moving_average=MovingAvg, dynamic_buffer_size=BufferSize}; + BufferSize0 > LowDynamicBuffer andalso MovingAvg < BufferSize0 * 0.4 -> + BufferSize = max(BufferSize0 div 2, LowDynamicBuffer), + ok = maybe_socket_error(State, Transport:setopts(Socket, [{buffer, BufferSize}])), + State#state{dynamic_buffer_moving_average=MovingAvg, dynamic_buffer_size=BufferSize}; + true -> + State#state{dynamic_buffer_moving_average=MovingAvg} + end. diff --git a/src/cowboy_handler.erl b/src/cowboy_handler.erl index c0f7ff7..1989512 100644 --- a/src/cowboy_handler.erl +++ b/src/cowboy_handler.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_http.erl b/src/cowboy_http.erl index c9bceed..10eb519 100644 --- a/src/cowboy_http.erl +++ b/src/cowboy_http.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2016-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -12,9 +12,12 @@ %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +%% @todo Worth renaming to cowboy_http1. +%% @todo Change use of cow_http to cow_http1 where appropriate. -module(cowboy_http). -export([init/6]). +-export([loop/1]). -export([system_continue/3]). -export([system_terminate/4]). 
@@ -22,11 +25,16 @@ -type opts() :: #{ active_n => pos_integer(), + alpn_default_protocol => http | http2, chunked => boolean(), compress_buffering => boolean(), compress_threshold => non_neg_integer(), connection_type => worker | supervisor, + dynamic_buffer => false | {pos_integer(), pos_integer()}, + dynamic_buffer_initial_average => non_neg_integer(), + dynamic_buffer_initial_size => pos_integer(), env => cowboy_middleware:env(), + hibernate => boolean(), http10_keepalive => boolean(), idle_timeout => timeout(), inactivity_timeout => timeout(), @@ -45,8 +53,10 @@ metrics_req_filter => fun((cowboy_req:req()) -> map()), metrics_resp_headers_filter => fun((cowboy:http_headers()) -> cowboy:http_headers()), middlewares => [module()], + protocols => [http | http2], proxy_header => boolean(), request_timeout => timeout(), + reset_idle_timeout_on_send => boolean(), sendfile => boolean(), shutdown_timeout => timeout(), stream_handlers => [module()], @@ -134,6 +144,10 @@ %% Flow requested for the current stream. flow = infinity :: non_neg_integer() | infinity, + %% Dynamic buffer moving average and current buffer size. + dynamic_buffer_size :: pos_integer() | false, + dynamic_buffer_moving_average :: non_neg_integer(), + %% Identifier for the stream currently being written. %% Note that out_streamid =< in_streamid. out_streamid = 1 :: pos_integer(), @@ -157,9 +171,11 @@ -spec init(pid(), ranch:ref(), inet:socket(), module(), ranch_proxy_header:proxy_info(), cowboy:opts()) -> ok. init(Parent, Ref, Socket, Transport, ProxyHeader, Opts) -> - Peer0 = Transport:peername(Socket), - Sock0 = Transport:sockname(Socket), - Cert1 = case Transport:name() of + {ok, Peer} = maybe_socket_error(undefined, Transport:peername(Socket), + 'A socket error occurred when retrieving the peer name.'), + {ok, Sock} = maybe_socket_error(undefined, Transport:sockname(Socket), + 'A socket error occurred when retrieving the sock name.'), + CertResult = case Transport:name() of ssl -> case ssl:peercert(Socket) of {error, no_peercert} -> @@ -170,36 +186,33 @@ init(Parent, Ref, Socket, Transport, ProxyHeader, Opts) -> _ -> {ok, undefined} end, - case {Peer0, Sock0, Cert1} of - {{ok, Peer}, {ok, Sock}, {ok, Cert}} -> - State = #state{ - parent=Parent, ref=Ref, socket=Socket, - transport=Transport, proxy_header=ProxyHeader, opts=Opts, - peer=Peer, sock=Sock, cert=Cert, - last_streamid=maps:get(max_keepalive, Opts, 1000)}, - setopts_active(State), - loop(set_timeout(State, request_timeout)); - {{error, Reason}, _, _} -> - terminate(undefined, {socket_error, Reason, - 'A socket error occurred when retrieving the peer name.'}); - {_, {error, Reason}, _} -> - terminate(undefined, {socket_error, Reason, - 'A socket error occurred when retrieving the sock name.'}); - {_, _, {error, Reason}} -> - terminate(undefined, {socket_error, Reason, - 'A socket error occurred when retrieving the client TLS certificate.'}) - end. + {ok, Cert} = maybe_socket_error(undefined, CertResult, + 'A socket error occurred when retrieving the client TLS certificate.'), + State = #state{ + parent=Parent, ref=Ref, socket=Socket, + transport=Transport, proxy_header=ProxyHeader, opts=Opts, + peer=Peer, sock=Sock, cert=Cert, + dynamic_buffer_size=init_dynamic_buffer_size(Opts), + dynamic_buffer_moving_average=maps:get(dynamic_buffer_initial_average, Opts, 0), + last_streamid=maps:get(max_keepalive, Opts, 1000)}, + safe_setopts_active(State), + before_loop(set_timeout(State, request_timeout)). + +-include("cowboy_dynamic_buffer.hrl"). 
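
%% Editor's sketch (not part of the patch): setting the options introduced
%% above on a listener. Values are illustrative; dynamic_buffer takes the
%% {Low, High} byte bounds also used as the default in cowboy:start_clear/3,
%% and active_n now defaults to 1 (see setopts_active/1 below).

tuned_listener_sketch(Dispatch) ->
    {ok, _} = cowboy:start_clear(example_http, [{port, 8080}], #{
        env => #{dispatch => Dispatch},
        dynamic_buffer => {1024, 131072},
        reset_idle_timeout_on_send => true,
        active_n => 100, %% restore the previous default if preferred
        hibernate => true %% may lower memory use between requests
    }).
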
setopts_active(#state{socket=Socket, transport=Transport, opts=Opts}) -> - N = maps:get(active_n, Opts, 100), + N = maps:get(active_n, Opts, 1), Transport:setopts(Socket, [{active, N}]). +safe_setopts_active(State) -> + ok = maybe_socket_error(State, setopts_active(State)). + active(State) -> - setopts_active(State), + safe_setopts_active(State), State#state{active=true}. passive(State=#state{socket=Socket, transport=Transport}) -> - Transport:setopts(Socket, [{active, false}]), + ok = maybe_socket_error(State, Transport:setopts(Socket, [{active, false}])), Messages = Transport:messages(), flush_passive(Socket, Messages), State#state{active=false}. @@ -214,6 +227,13 @@ flush_passive(Socket, Messages) -> ok end. +before_loop(State=#state{opts=#{hibernate := true}}) -> + proc_lib:hibernate(?MODULE, loop, [State]); +before_loop(State) -> + loop(State). + +-spec loop(#state{}) -> ok. + loop(State=#state{parent=Parent, socket=Socket, transport=Transport, opts=Opts, buffer=Buffer, timer=TimerRef, children=Children, in_streamid=InStreamID, last_streamid=LastStreamID}) -> @@ -222,11 +242,13 @@ loop(State=#state{parent=Parent, socket=Socket, transport=Transport, opts=Opts, receive %% Discard data coming in after the last request %% we want to process was received fully. - {OK, Socket, _} when OK =:= element(1, Messages), InStreamID > LastStreamID -> - loop(State); + {OK, Socket, Data} when OK =:= element(1, Messages), InStreamID > LastStreamID -> + State1 = maybe_resize_buffer(State, Data), + before_loop(State1); %% Socket messages. {OK, Socket, Data} when OK =:= element(1, Messages) -> - parse(<< Buffer/binary, Data/binary >>, State); + State1 = maybe_resize_buffer(State, Data), + parse(<< Buffer/binary, Data/binary >>, State1); {Closed, Socket} when Closed =:= element(2, Messages) -> terminate(State, {socket_error, closed, 'The socket has been closed.'}); {Error, Socket, Reason} when Error =:= element(3, Messages) -> @@ -234,45 +256,60 @@ loop(State=#state{parent=Parent, socket=Socket, transport=Transport, opts=Opts, {Passive, Socket} when Passive =:= element(4, Messages); %% Hardcoded for compatibility with Ranch 1.x. Passive =:= tcp_passive; Passive =:= ssl_passive -> - setopts_active(State), - loop(State); + safe_setopts_active(State), + before_loop(State); %% Timeouts. {timeout, Ref, {shutdown, Pid}} -> cowboy_children:shutdown_timeout(Children, Ref, Pid), - loop(State); + before_loop(State); {timeout, TimerRef, Reason} -> timeout(State, Reason); {timeout, _, _} -> - loop(State); + before_loop(State); %% System messages. {'EXIT', Parent, shutdown} -> Reason = {stop, {exit, shutdown}, 'Parent process requested shutdown.'}, - loop(initiate_closing(State, Reason)); + before_loop(initiate_closing(State, Reason)); {'EXIT', Parent, Reason} -> terminate(State, {stop, {exit, Reason}, 'Parent process terminated.'}); {system, From, Request} -> sys:handle_system_msg(Request, From, Parent, ?MODULE, [], State); %% Messages pertaining to a stream. {{Pid, StreamID}, Msg} when Pid =:= self() -> - loop(info(State, StreamID, Msg)); + before_loop(info(State, StreamID, Msg)); %% Exit signal from children. Msg = {'EXIT', Pid, _} -> - loop(down(State, Pid, Msg)); + before_loop(down(State, Pid, Msg)); %% Calls from supervisor module. {'$gen_call', From, Call} -> cowboy_children:handle_supervisor_call(Call, From, Children, ?MODULE), - loop(State); + before_loop(State); %% Unknown messages. 
Msg -> cowboy:log(warning, "Received stray message ~p.~n", [Msg], Opts), - loop(State) + before_loop(State) after InactivityTimeout -> terminate(State, {internal_error, timeout, 'No message or data received before timeout.'}) end. -%% We do not set request_timeout if there are active streams. -set_timeout(State=#state{streams=[_|_]}, request_timeout) -> - State; +%% For HTTP/1.1 we have two types of timeouts: the request_timeout +%% is used when there is no currently ongoing request. This means +%% that we are not currently sending or receiving data and that +%% the next data to be received will be a new request. The +%% request_timeout is set once when we no longer have ongoing +%% requests, and runs until the full set of request headers +%% is received. It is not reset. +%% +%% After that point we use the idle_timeout. We continue using +%% the idle_timeout if pipelined requests come in: we are doing +%% work and just want to ensure the socket is not half-closed. +%% We continue using the idle_timeout up until there is no +%% ongoing request. This includes requests that were processed +%% and for which we only want to skip the body. Once the body +%% has been read fully we can go back to request_timeout. The +%% idle_timeout is reset every time we receive data and, +%% optionally, every time we send data. + %% We do not set request_timeout if we are skipping a body. set_timeout(State=#state{in_state=#ps_body{}}, request_timeout) -> State; @@ -282,6 +319,7 @@ set_timeout(State=#state{streams=[], in_state=InState}, idle_timeout) when element(1, InState) =/= ps_body -> State; %% Otherwise we can set the timeout. +%% @todo Don't do this so often, use a strategy similar to Websocket/H2 if possible. set_timeout(State0=#state{opts=Opts, overriden_opts=Override}, Name) -> State = cancel_timeout(State0), Default = case Name of @@ -299,6 +337,14 @@ set_timeout(State0=#state{opts=Opts, overriden_opts=Override}, Name) -> end, State#state{timer=TimerRef}. +maybe_reset_idle_timeout(State=#state{opts=Opts}) -> + case maps:get(reset_idle_timeout_on_send, Opts, false) of + true -> + set_timeout(State, idle_timeout); + false -> + State + end. + cancel_timeout(State=#state{timer=TimerRef}) -> ok = case TimerRef of undefined -> @@ -306,7 +352,7 @@ cancel_timeout(State=#state{timer=TimerRef}) -> _ -> %% Do a synchronous cancel and remove the message if any %% to avoid receiving stray messages. - _ = erlang:cancel_timer(TimerRef), + _ = erlang:cancel_timer(TimerRef, [{async, false}, {info, false}]), receive {timeout, TimerRef, _} -> ok after 0 -> @@ -327,12 +373,12 @@ timeout(State, idle_timeout) -> 'Connection idle longer than configuration allows.'}). parse(<<>>, State) -> - loop(State#state{buffer= <<>>}); + before_loop(State#state{buffer= <<>>}); %% Do not process requests that come in after the last request %% and discard the buffer if any to save memory. 
parse(_, State=#state{in_streamid=InStreamID, in_state=#ps_request_line{}, last_streamid=LastStreamID}) when InStreamID > LastStreamID -> - loop(State#state{buffer= <<>>}); + before_loop(State#state{buffer= <<>>}); parse(Buffer, State=#state{in_state=#ps_request_line{empty_lines=EmptyLines}}) -> after_parse(parse_request(Buffer, State, EmptyLines)); parse(Buffer, State=#state{in_state=PS=#ps_header{headers=Headers, name=undefined}}) -> @@ -355,16 +401,27 @@ after_parse({request, Req=#{streamid := StreamID, method := Method, TE = maps:get(<<"te">>, Headers, undefined), Streams = [#stream{id=StreamID, state=StreamState, method=Method, version=Version, te=TE}|Streams0], - State1 = case maybe_req_close(State0, Headers, Version) of - close -> State0#state{streams=Streams, last_streamid=StreamID, flow=Flow}; - keepalive -> State0#state{streams=Streams, flow=Flow} + State1 = State0#state{streams=Streams, flow=Flow}, + State2 = case maybe_req_close(State1, Headers, Version) of + close -> + State1#state{last_streamid=StreamID}; + keepalive -> + State1; + bad_connection_header -> + error_terminate(400, State1, {connection_error, protocol_error, + 'The Connection header is invalid. (RFC7230 6.1)'}) end, - State = set_timeout(State1, idle_timeout), + State = set_timeout(State2, idle_timeout), parse(Buffer, commands(State, StreamID, Commands)) catch Class:Exception:Stacktrace -> cowboy:log(cowboy_stream:make_error_log(init, [StreamID, Req, Opts], Class, Exception, Stacktrace), Opts), + %% We do not reset the idle timeout on send here + %% because an error occurred in the application. While we + %% are keeping the connection open for further requests we + %% do not want to keep the connection up too long if no + %% additional requests come in. early_error(500, State0, {internal_error, {Class, Exception}, 'Unhandled exception in cowboy_stream:init/3.'}, Req), parse(Buffer, State0) @@ -377,10 +434,7 @@ after_parse({data, StreamID, IsFin, Data, State0=#state{opts=Opts, buffer=Buffer {Commands, StreamState} -> Streams = lists:keyreplace(StreamID, #stream.id, Streams0, Stream#stream{state=StreamState}), - State1 = set_timeout(State0, case IsFin of - fin -> request_timeout; - nofin -> idle_timeout - end), + State1 = set_timeout(State0, idle_timeout), State = update_flow(IsFin, Data, State1#state{streams=Streams}), parse(Buffer, commands(State, StreamID, Commands)) catch Class:Exception:Stacktrace -> @@ -393,13 +447,13 @@ after_parse({data, StreamID, IsFin, Data, State0=#state{opts=Opts, buffer=Buffer end; %% No corresponding stream. We must skip the body of the previous request %% in order to process the next one. -after_parse({data, _, IsFin, _, State}) -> - loop(set_timeout(State, case IsFin of +after_parse({data, _, IsFin, _, State=#state{buffer=Buffer}}) -> + parse(Buffer, set_timeout(State, case IsFin of fin -> request_timeout; nofin -> idle_timeout end)); after_parse({more, State}) -> - loop(set_timeout(State, idle_timeout)). + before_loop(set_timeout(State, idle_timeout)). update_flow(fin, _, State) -> %% This function is only called after parsing, therefore we @@ -459,8 +513,13 @@ parse_request(Buffer, State=#state{opts=Opts, in_streamid=InStreamID}, EmptyLine 'The TRACE method is currently not implemented. (RFC7231 4.3.8)'}); %% Accept direct HTTP/2 only at the beginning of the connection. << "PRI * HTTP/2.0\r\n", _/bits >> when InStreamID =:= 1 -> - %% @todo Might be worth throwing to get a clean stacktrace. 
- http2_upgrade(State, Buffer); + case lists:member(http2, maps:get(protocols, Opts, [http2, http])) of + true -> + http2_upgrade(State, Buffer); + false -> + error_terminate(501, State, {connection_error, no_error, + 'Prior knowledge upgrade to HTTP/2 is disabled by configuration.'}) + end; _ -> parse_method(Buffer, State, <<>>, maps:get(max_method_length, Opts, 32)) @@ -748,41 +807,44 @@ default_port(_) -> 80. %% End of request parsing. request(Buffer, State0=#state{ref=Ref, transport=Transport, peer=Peer, sock=Sock, cert=Cert, - proxy_header=ProxyHeader, in_streamid=StreamID, in_state= + opts=Opts, proxy_header=ProxyHeader, in_streamid=StreamID, in_state= PS=#ps_header{method=Method, path=Path, qs=Qs, version=Version}}, - Headers0, Host, Port) -> + Headers, Host, Port) -> Scheme = case Transport:secure() of true -> <<"https">>; false -> <<"http">> end, - {Headers, HasBody, BodyLength, TDecodeFun, TDecodeState} = case Headers0 of + {HasBody, BodyLength, TDecodeFun, TDecodeState} = case Headers of + #{<<"transfer-encoding">> := _, <<"content-length">> := _} -> + error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers}}, + {stream_error, protocol_error, + 'The request had both transfer-encoding and content-length headers. (RFC7230 3.3.3)'}); #{<<"transfer-encoding">> := TransferEncoding0} -> try cow_http_hd:parse_transfer_encoding(TransferEncoding0) of [<<"chunked">>] -> - {maps:remove(<<"content-length">>, Headers0), - true, undefined, fun cow_http_te:stream_chunked/2, {0, 0}}; + {true, undefined, fun cow_http_te:stream_chunked/2, {0, 0}}; _ -> - error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers0}}, + error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers}}, {stream_error, protocol_error, 'Cowboy only supports transfer-encoding: chunked. (RFC7230 3.3.1)'}) catch _:_ -> - error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers0}}, + error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers}}, {stream_error, protocol_error, 'The transfer-encoding header is invalid. (RFC7230 3.3.1)'}) end; #{<<"content-length">> := <<"0">>} -> - {Headers0, false, 0, undefined, undefined}; + {false, 0, undefined, undefined}; #{<<"content-length">> := BinLength} -> Length = try cow_http_hd:parse_content_length(BinLength) catch _:_ -> - error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers0}}, + error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers}}, {stream_error, protocol_error, 'The content-length header is invalid. (RFC7230 3.3.2)'}) end, - {Headers0, true, Length, fun cow_http_te:stream_identity/2, {0, Length}}; + {true, Length, fun cow_http_te:stream_identity/2, {0, Length}}; _ -> - {Headers0, false, 0, undefined, undefined} + {false, 0, undefined, undefined} end, Req0 = #{ ref => Ref, @@ -809,7 +871,7 @@ request(Buffer, State0=#state{ref=Ref, transport=Transport, peer=Peer, sock=Sock undefined -> Req0; _ -> Req0#{proxy_header => ProxyHeader} end, - case is_http2_upgrade(Headers, Version) of + case is_http2_upgrade(Headers, Version, Opts) of false -> State = case HasBody of true -> @@ -831,12 +893,13 @@ request(Buffer, State0=#state{ref=Ref, transport=Transport, peer=Peer, sock=Sock %% HTTP/2 upgrade. -%% @todo We must not upgrade to h2c over a TLS connection. 
is_http2_upgrade(#{<<"connection">> := Conn, <<"upgrade">> := Upgrade, - <<"http2-settings">> := HTTP2Settings}, 'HTTP/1.1') -> + <<"http2-settings">> := HTTP2Settings}, 'HTTP/1.1', Opts) -> Conns = cow_http_hd:parse_connection(Conn), - case {lists:member(<<"upgrade">>, Conns), lists:member(<<"http2-settings">>, Conns)} of - {true, true} -> + case lists:member(<<"upgrade">>, Conns) + andalso lists:member(<<"http2-settings">>, Conns) + andalso lists:member(http2, maps:get(protocols, Opts, [http2, http])) of + true -> Protocols = cow_http_hd:parse_upgrade(Upgrade), case lists:member(<<"h2c">>, Protocols) of true -> @@ -847,17 +910,17 @@ is_http2_upgrade(#{<<"connection">> := Conn, <<"upgrade">> := Upgrade, _ -> false end; -is_http2_upgrade(_, _) -> +is_http2_upgrade(_, _, _) -> false. %% Prior knowledge upgrade, without an HTTP/1.1 request. http2_upgrade(State=#state{parent=Parent, ref=Ref, socket=Socket, transport=Transport, - proxy_header=ProxyHeader, opts=Opts, peer=Peer, sock=Sock, cert=Cert}, Buffer) -> + proxy_header=ProxyHeader, peer=Peer, sock=Sock, cert=Cert}, Buffer) -> case Transport:secure() of false -> _ = cancel_timeout(State), - cowboy_http2:init(Parent, Ref, Socket, Transport, - ProxyHeader, Opts, Peer, Sock, Cert, Buffer); + cowboy_http2:init(Parent, Ref, Socket, Transport, ProxyHeader, + opts_for_upgrade(State), Peer, Sock, Cert, Buffer); true -> error_terminate(400, State, {connection_error, protocol_error, 'Clients that support HTTP/2 over TLS MUST use ALPN. (RFC7540 3.4)'}) @@ -865,22 +928,37 @@ http2_upgrade(State=#state{parent=Parent, ref=Ref, socket=Socket, transport=Tran %% Upgrade via an HTTP/1.1 request. http2_upgrade(State=#state{parent=Parent, ref=Ref, socket=Socket, transport=Transport, - proxy_header=ProxyHeader, opts=Opts, peer=Peer, sock=Sock, cert=Cert}, + proxy_header=ProxyHeader, peer=Peer, sock=Sock, cert=Cert}, Buffer, HTTP2Settings, Req) -> - %% @todo - %% However if the client sent a body, we need to read the body in full - %% and if we can't do that, return a 413 response. Some options are in order. - %% Always half-closed stream coming from this side. - try cow_http_hd:parse_http2_settings(HTTP2Settings) of - Settings -> - _ = cancel_timeout(State), - cowboy_http2:init(Parent, Ref, Socket, Transport, - ProxyHeader, Opts, Peer, Sock, Cert, Buffer, Settings, Req) - catch _:_ -> - error_terminate(400, State, {connection_error, protocol_error, - 'The HTTP2-Settings header must contain a base64 SETTINGS payload. (RFC7540 3.2, RFC7540 3.2.1)'}) + case Transport:secure() of + false -> + %% @todo + %% However if the client sent a body, we need to read the body in full + %% and if we can't do that, return a 413 response. Some options are in order. + %% Always half-closed stream coming from this side. + try cow_http_hd:parse_http2_settings(HTTP2Settings) of + Settings -> + _ = cancel_timeout(State), + cowboy_http2:init(Parent, Ref, Socket, Transport, ProxyHeader, + opts_for_upgrade(State), Peer, Sock, Cert, Buffer, Settings, Req) + catch _:_ -> + error_terminate(400, State, {connection_error, protocol_error, + 'The HTTP2-Settings header must contain a base64 SETTINGS payload. (RFC7540 3.2, RFC7540 3.2.1)'}) + end; + true -> + error_terminate(400, State, {connection_error, protocol_error, + 'Clients that support HTTP/2 over TLS MUST use ALPN. (RFC7540 3.4)'}) end. 
+opts_for_upgrade(#state{opts=Opts, dynamic_buffer_size=false}) -> + Opts; +opts_for_upgrade(#state{opts=Opts, dynamic_buffer_size=Size, + dynamic_buffer_moving_average=MovingAvg}) -> + Opts#{ + dynamic_buffer_initial_average => MovingAvg, + dynamic_buffer_initial_size => Size + }. + %% Request body parsing. parse_body(Buffer, State=#state{in_streamid=StreamID, in_state= @@ -953,6 +1031,11 @@ info(State=#state{opts=Opts, streams=Streams0}, StreamID, Msg) -> end. %% Commands. +%% +%% The order in which the commands are given matters. Cowboy may +%% stop processing commands after the 'stop' command or when an +%% error occurred, such as a socket error. Critical commands such +%% as 'spawn' should always be given first. commands(State, _, []) -> State; @@ -1006,19 +1089,20 @@ commands(State=#state{out_state=wait, out_streamid=StreamID}, StreamID, commands(State, StreamID, [{error_response, _, _, _}|Tail]) -> commands(State, StreamID, Tail); %% Send an informational response. -commands(State=#state{socket=Socket, transport=Transport, out_state=wait, streams=Streams}, +commands(State0=#state{socket=Socket, transport=Transport, out_state=wait, streams=Streams}, StreamID, [{inform, StatusCode, Headers}|Tail]) -> %% @todo I'm pretty sure the last stream in the list is the one we want %% considering all others are queued. #stream{version=Version} = lists:keyfind(StreamID, #stream.id, Streams), _ = case Version of 'HTTP/1.1' -> - Transport:send(Socket, cow_http:response(StatusCode, 'HTTP/1.1', - headers_to_list(Headers))); + ok = maybe_socket_error(State0, Transport:send(Socket, + cow_http:response(StatusCode, 'HTTP/1.1', headers_to_list(Headers)))); %% Do not send informational responses to HTTP/1.0 clients. (RFC7231 6.2) 'HTTP/1.0' -> ok end, + State = maybe_reset_idle_timeout(State0), commands(State, StreamID, Tail); %% Send a full response. %% @@ -1031,17 +1115,18 @@ commands(State0=#state{socket=Socket, transport=Transport, out_state=wait, strea %% considering all others are queued. #stream{version=Version} = lists:keyfind(StreamID, #stream.id, Streams), {State1, Headers} = connection(State0, Headers0, StreamID, Version), - State = State1#state{out_state=done}, + State2 = State1#state{out_state=done}, %% @todo Ensure content-length is set. 204 must never have content-length set. Response = cow_http:response(StatusCode, 'HTTP/1.1', headers_to_list(Headers)), %% @todo 204 and 304 responses must not include a response body. (RFC7230 3.3.1, RFC7230 3.3.2) case Body of {sendfile, _, _, _} -> - Transport:send(Socket, Response), - sendfile(State, Body); + ok = maybe_socket_error(State2, Transport:send(Socket, Response)), + sendfile(State2, Body); _ -> - Transport:send(Socket, [Response, Body]) + ok = maybe_socket_error(State2, Transport:send(Socket, [Response, Body])) end, + State = maybe_reset_idle_timeout(State2), commands(State, StreamID, Tail); %% Send response headers and initiate chunked encoding or streaming. 
commands(State0=#state{socket=Socket, transport=Transport, @@ -1078,8 +1163,10 @@ commands(State0=#state{socket=Socket, transport=Transport, trailers -> Headers1; _ -> maps:remove(<<"trailer">>, Headers1) end, - {State, Headers} = connection(State1, Headers2, StreamID, Version), - Transport:send(Socket, cow_http:response(StatusCode, 'HTTP/1.1', headers_to_list(Headers))), + {State2, Headers} = connection(State1, Headers2, StreamID, Version), + ok = maybe_socket_error(State2, Transport:send(Socket, + cow_http:response(StatusCode, 'HTTP/1.1', headers_to_list(Headers)))), + State = maybe_reset_idle_timeout(State2), commands(State, StreamID, Tail); %% Send a response body chunk. %% @todo We need to kill the stream if it tries to send data before headers. @@ -1098,27 +1185,33 @@ commands(State0=#state{socket=Socket, transport=Transport, streams=Streams0, out Stream0=#stream{method= <<"HEAD">>} -> Stream0; Stream0 when Size =:= 0, IsFin =:= fin, OutState =:= chunked -> - Transport:send(Socket, <<"0\r\n\r\n">>), + ok = maybe_socket_error(State0, + Transport:send(Socket, <<"0\r\n\r\n">>)), Stream0; Stream0 when Size =:= 0 -> Stream0; Stream0 when is_tuple(Data), OutState =:= chunked -> - Transport:send(Socket, [integer_to_binary(Size, 16), <<"\r\n">>]), + ok = maybe_socket_error(State0, + Transport:send(Socket, [integer_to_binary(Size, 16), <<"\r\n">>])), sendfile(State0, Data), - Transport:send(Socket, - case IsFin of - fin -> <<"\r\n0\r\n\r\n">>; - nofin -> <<"\r\n">> - end), + ok = maybe_socket_error(State0, + Transport:send(Socket, + case IsFin of + fin -> <<"\r\n0\r\n\r\n">>; + nofin -> <<"\r\n">> + end) + ), Stream0; Stream0 when OutState =:= chunked -> - Transport:send(Socket, [ - integer_to_binary(Size, 16), <<"\r\n">>, Data, - case IsFin of - fin -> <<"\r\n0\r\n\r\n">>; - nofin -> <<"\r\n">> - end - ]), + ok = maybe_socket_error(State0, + Transport:send(Socket, [ + integer_to_binary(Size, 16), <<"\r\n">>, Data, + case IsFin of + fin -> <<"\r\n0\r\n\r\n">>; + nofin -> <<"\r\n">> + end + ]) + ), Stream0; Stream0 when OutState =:= streaming -> #stream{local_sent_size=SentSize0, local_expected_size=ExpectedSize} = Stream0, @@ -1130,34 +1223,39 @@ commands(State0=#state{socket=Socket, transport=Transport, streams=Streams0, out is_tuple(Data) -> sendfile(State0, Data); true -> - Transport:send(Socket, Data) + ok = maybe_socket_error(State0, Transport:send(Socket, Data)) end, Stream0#stream{local_sent_size=SentSize} end, - State = case IsFin of + State1 = case IsFin of fin -> State0#state{out_state=done}; nofin -> State0 end, + State = maybe_reset_idle_timeout(State1), Streams = lists:keyreplace(StreamID, #stream.id, Streams0, Stream), commands(State#state{streams=Streams}, StreamID, Tail); -commands(State=#state{socket=Socket, transport=Transport, streams=Streams, out_state=OutState}, +commands(State0=#state{socket=Socket, transport=Transport, streams=Streams, out_state=OutState}, StreamID, [{trailers, Trailers}|Tail]) -> case stream_te(OutState, lists:keyfind(StreamID, #stream.id, Streams)) of trailers -> - Transport:send(Socket, [ - <<"0\r\n">>, - cow_http:headers(maps:to_list(Trailers)), - <<"\r\n">> - ]); + ok = maybe_socket_error(State0, + Transport:send(Socket, [ + <<"0\r\n">>, + cow_http:headers(maps:to_list(Trailers)), + <<"\r\n">> + ]) + ); no_trailers -> - Transport:send(Socket, <<"0\r\n\r\n">>); + ok = maybe_socket_error(State0, + Transport:send(Socket, <<"0\r\n\r\n">>)); not_chunked -> ok end, - commands(State#state{out_state=done}, StreamID, Tail); + State = 
maybe_reset_idle_timeout(State0#state{out_state=done}), + commands(State, StreamID, Tail); %% Protocol takeover. commands(State0=#state{ref=Ref, parent=Parent, socket=Socket, transport=Transport, - out_state=OutState, opts=Opts, buffer=Buffer, children=Children}, StreamID, + out_state=OutState, buffer=Buffer, children=Children}, StreamID, [{switch_protocol, Headers, Protocol, InitialState}|_Tail]) -> %% @todo If there's streams opened after this one, fail instead of 101. State1 = cancel_timeout(State0), @@ -1174,28 +1272,26 @@ commands(State0=#state{ref=Ref, parent=Parent, socket=Socket, transport=Transpor _ -> State end, #stream{state=StreamState} = lists:keyfind(StreamID, #stream.id, Streams), - %% @todo We need to shutdown processes here first. stream_call_terminate(StreamID, switch_protocol, StreamState, State), %% Terminate children processes and flush any remaining messages from the mailbox. cowboy_children:terminate(Children), flush(Parent), - Protocol:takeover(Parent, Ref, Socket, Transport, Opts, Buffer, InitialState); + %% Turn off the trap_exit process flag + %% since this process will no longer be a supervisor. + process_flag(trap_exit, false), + Protocol:takeover(Parent, Ref, Socket, Transport, + opts_for_upgrade(State), Buffer, InitialState); %% Set options dynamically. -commands(State0=#state{overriden_opts=Opts}, - StreamID, [{set_options, SetOpts}|Tail]) -> - State1 = case SetOpts of - #{idle_timeout := IdleTimeout} -> - set_timeout(State0#state{overriden_opts=Opts#{idle_timeout => IdleTimeout}}, +commands(State0, StreamID, [{set_options, SetOpts}|Tail]) -> + State = maps:fold(fun + (chunked, Chunked, StateF=#state{overriden_opts=Opts}) -> + StateF#state{overriden_opts=Opts#{chunked => Chunked}}; + (idle_timeout, IdleTimeout, StateF=#state{overriden_opts=Opts}) -> + set_timeout(StateF#state{overriden_opts=Opts#{idle_timeout => IdleTimeout}}, idle_timeout); - _ -> - State0 - end, - State = case SetOpts of - #{chunked := Chunked} -> - State1#state{overriden_opts=Opts#{chunked => Chunked}}; - _ -> - State1 - end, + (_, _, StateF) -> + StateF + end, State0, SetOpts), commands(State, StreamID, Tail); %% Stream shutdown. commands(State, StreamID, [stop|Tail]) -> @@ -1238,10 +1334,12 @@ sendfile(State=#state{socket=Socket, transport=Transport, opts=Opts}, {sendfile, Offset, Bytes, Path}) -> try %% When sendfile is disabled we explicitly use the fallback. - _ = case maps:get(sendfile, Opts, true) of - true -> Transport:sendfile(Socket, Path, Offset, Bytes); - false -> ranch_transport:sendfile(Transport, Socket, Path, Offset, Bytes, []) - end, + {ok, _} = maybe_socket_error(State, + case maps:get(sendfile, Opts, true) of + true -> Transport:sendfile(Socket, Path, Offset, Bytes); + false -> ranch_transport:sendfile(Transport, Socket, Path, Offset, Bytes, []) + end + ), ok catch _:_ -> terminate(State, {socket_error, sendfile_crash, @@ -1312,20 +1410,24 @@ stream_terminate(State0=#state{opts=Opts, in_streamid=InStreamID, in_state=InSta end. stream_next(State0=#state{opts=Opts, active=Active, out_streamid=OutStreamID, streams=Streams}) -> + %% Enable active mode again if it was disabled. + State1 = case Active of + true -> State0; + false -> active(State0) + end, NextOutStreamID = OutStreamID + 1, case lists:keyfind(NextOutStreamID, #stream.id, Streams) of false -> - State0#state{out_streamid=NextOutStreamID, out_state=wait}; + State = State1#state{out_streamid=NextOutStreamID, out_state=wait}, + %% There are no streams remaining. 
We therefore can + %% and want to switch back to the request_timeout. + set_timeout(State, request_timeout); #stream{queue=Commands} -> - State = case Active of - true -> State0; - false -> active(State0) - end, %% @todo Remove queue from the stream. %% We set the flow to the initial flow size even though %% we might have sent some data through already due to pipelining. Flow = maps:get(initial_stream_flow_size, Opts, 65535), - commands(State#state{flow=Flow, out_streamid=NextOutStreamID, out_state=wait}, + commands(State1#state{flow=Flow, out_streamid=NextOutStreamID, out_state=wait}, NextOutStreamID, Commands) end. @@ -1341,17 +1443,23 @@ stream_call_terminate(StreamID, Reason, StreamState, #state{opts=Opts}) -> maybe_req_close(#state{opts=#{http10_keepalive := false}}, _, 'HTTP/1.0') -> close; maybe_req_close(_, #{<<"connection">> := Conn}, 'HTTP/1.0') -> - Conns = cow_http_hd:parse_connection(Conn), - case lists:member(<<"keep-alive">>, Conns) of - true -> keepalive; - false -> close + try cow_http_hd:parse_connection(Conn) of + Conns -> + case lists:member(<<"keep-alive">>, Conns) of + true -> keepalive; + false -> close + end + catch _:_ -> + bad_connection_header end; maybe_req_close(_, _, 'HTTP/1.0') -> close; maybe_req_close(_, #{<<"connection">> := Conn}, 'HTTP/1.1') -> - case connection_hd_is_close(Conn) of + try connection_hd_is_close(Conn) of true -> close; false -> keepalive + catch _:_ -> + bad_connection_header end; maybe_req_close(_, _, _) -> keepalive. @@ -1420,37 +1528,55 @@ error_terminate(StatusCode, State=#state{ref=Ref, peer=Peer, in_state=StreamStat early_error(StatusCode, State, Reason, PartialReq) -> early_error(StatusCode, State, Reason, PartialReq, #{}). -early_error(StatusCode0, #state{socket=Socket, transport=Transport, +early_error(StatusCode0, State=#state{socket=Socket, transport=Transport, opts=Opts, in_streamid=StreamID}, Reason, PartialReq, RespHeaders0) -> RespHeaders1 = RespHeaders0#{<<"content-length">> => <<"0">>}, Resp = {response, StatusCode0, RespHeaders1, <<>>}, try cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts) of {response, StatusCode, RespHeaders, RespBody} -> - Transport:send(Socket, [ - cow_http:response(StatusCode, 'HTTP/1.1', maps:to_list(RespHeaders)), - %% @todo We shouldn't send the body when the method is HEAD. - %% @todo Technically we allow the sendfile tuple. - RespBody - ]) + ok = maybe_socket_error(State, + Transport:send(Socket, [ + cow_http:response(StatusCode, 'HTTP/1.1', maps:to_list(RespHeaders)), + %% @todo We shouldn't send the body when the method is HEAD. + %% @todo Technically we allow the sendfile tuple. + RespBody + ]) + ) catch Class:Exception:Stacktrace -> cowboy:log(cowboy_stream:make_error_log(early_error, [StreamID, Reason, PartialReq, Resp, Opts], Class, Exception, Stacktrace), Opts), %% We still need to send an error response, so send what we initially %% wanted to send. It's better than nothing. - Transport:send(Socket, cow_http:response(StatusCode0, - 'HTTP/1.1', maps:to_list(RespHeaders1))) - end, - ok. + ok = maybe_socket_error(State, + Transport:send(Socket, cow_http:response(StatusCode0, + 'HTTP/1.1', maps:to_list(RespHeaders1))) + ) + end. initiate_closing(State=#state{streams=[]}, Reason) -> terminate(State, Reason); -initiate_closing(State=#state{streams=[_Stream|Streams], +initiate_closing(State=#state{streams=Streams, out_streamid=OutStreamID}, Reason) -> - terminate_all_streams(State, Streams, Reason), - State#state{last_streamid=OutStreamID}. 
- --spec terminate(_, _) -> no_return(). + {value, LastStream, TerminatedStreams} + = lists:keytake(OutStreamID, #stream.id, Streams), + terminate_all_streams(State, TerminatedStreams, Reason), + State#state{streams=[LastStream], last_streamid=OutStreamID}. + +%% Function replicated in cowboy_http2. +maybe_socket_error(State, {error, closed}) -> + terminate(State, {socket_error, closed, 'The socket has been closed.'}); +maybe_socket_error(State, Reason) -> + maybe_socket_error(State, Reason, 'An error has occurred on the socket.'). + +maybe_socket_error(_, Result = ok, _) -> + Result; +maybe_socket_error(_, Result = {ok, _}, _) -> + Result; +maybe_socket_error(State, {error, Reason}, Human) -> + terminate(State, {socket_error, Reason, Human}). + +-spec terminate(#state{} | undefined, _) -> no_return(). terminate(undefined, Reason) -> exit({shutdown, Reason}); terminate(State=#state{streams=Streams, children=Children}, Reason) -> @@ -1484,6 +1610,9 @@ terminate_linger(State=#state{socket=Socket, transport=Transport, opts=Opts}) -> terminate_linger_before_loop(State, TimerRef, Messages) -> %% We may already be in active mode when we do this %% but it's OK because we are shutting down anyway. + %% + %% We specially handle the socket error to terminate + %% when an error occurs. case setopts_active(State) of ok -> terminate_linger_loop(State, TimerRef, Messages); @@ -1511,12 +1640,12 @@ terminate_linger_loop(State=#state{socket=Socket}, TimerRef, Messages) -> -spec system_continue(_, _, #state{}) -> ok. system_continue(_, _, State) -> - loop(State). + before_loop(State). -spec system_terminate(any(), _, _, #state{}) -> no_return(). system_terminate(Reason0, _, _, State) -> Reason = {stop, {exit, Reason0}, 'sys:terminate/2,3 was called.'}, - loop(initiate_closing(State, Reason)). + before_loop(initiate_closing(State, Reason)). -spec system_code_change(Misc, _, _, _) -> {ok, Misc} when Misc::{#state{}, binary()}. system_code_change(Misc, _, _, _) -> diff --git a/src/cowboy_http2.erl b/src/cowboy_http2.erl index ed2623c..0d22fa1 100644 --- a/src/cowboy_http2.erl +++ b/src/cowboy_http2.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2015-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -17,6 +17,7 @@ -export([init/6]). -export([init/10]). -export([init/12]). +-export([loop/2]). -export([system_continue/3]). -export([system_terminate/4]). 
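Editor's note: loop/2 is now exported because the connection process may hibernate between messages when the new hibernate option is set; before_loop/2 further down calls proc_lib:hibernate(?MODULE, loop, [State, Buffer]). A hedged sketch of enabling it, with example_https, TransOpts and ProtoOpts assumed to be defined by the application:

	{ok, _} = cowboy:start_tls(example_https, TransOpts, ProtoOpts#{
		%% Hibernate the connection process between messages.
		hibernate => true
	}).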
@@ -24,15 +25,20 @@ -type opts() :: #{ active_n => pos_integer(), + alpn_default_protocol => http | http2, compress_buffering => boolean(), compress_threshold => non_neg_integer(), connection_type => worker | supervisor, connection_window_margin_size => 0..16#7fffffff, connection_window_update_threshold => 0..16#7fffffff, + dynamic_buffer => false | {pos_integer(), pos_integer()}, + dynamic_buffer_initial_average => non_neg_integer(), + dynamic_buffer_initial_size => pos_integer(), enable_connect_protocol => boolean(), env => cowboy_middleware:env(), goaway_initial_timeout => timeout(), goaway_complete_timeout => timeout(), + hibernate => boolean(), idle_timeout => timeout(), inactivity_timeout => timeout(), initial_connection_window_size => 65535..16#7fffffff, @@ -44,10 +50,12 @@ max_connection_window_size => 0..16#7fffffff, max_decode_table_size => non_neg_integer(), max_encode_table_size => non_neg_integer(), + max_fragmented_header_block_size => 16384..16#7fffffff, max_frame_size_received => 16384..16777215, max_frame_size_sent => 16384..16777215 | infinity, max_received_frame_rate => {pos_integer(), timeout()}, max_reset_stream_rate => {pos_integer(), timeout()}, + max_cancel_stream_rate => {pos_integer(), timeout()}, max_stream_buffer_size => non_neg_integer(), max_stream_window_size => 0..16#7fffffff, metrics_callback => cowboy_metrics_h:metrics_callback(), @@ -55,7 +63,9 @@ metrics_resp_headers_filter => fun((cowboy:http_headers()) -> cowboy:http_headers()), middlewares => [module()], preface_timeout => timeout(), + protocols => [http | http2], proxy_header => boolean(), + reset_idle_timeout_on_send => boolean(), sendfile => boolean(), settings_timeout => timeout(), shutdown_timeout => timeout(), @@ -82,6 +92,14 @@ state :: {module, any()} }). +%% We don't want to reset the idle timeout too often, +%% so we don't reset it on data. Instead we reset the +%% number of ticks we have observed. We divide the +%% timeout value by a value and that value becomes +%% the number of ticks at which point we can drop +%% the connection. This value is the number of ticks. +-define(IDLE_TIMEOUT_TICKS, 10). + -record(state, { parent = undefined :: pid(), ref :: ranch:ref(), @@ -92,6 +110,7 @@ %% Timer for idle_timeout; also used for goaway timers. timer = undefined :: undefined | reference(), + idle_timeout_num = 0 :: 0..?IDLE_TIMEOUT_TICKS, %% Remote address and port for the connection. peer = undefined :: {inet:ip_address(), inet:port_number()}, @@ -114,9 +133,17 @@ reset_rate_num :: undefined | pos_integer(), reset_rate_time :: undefined | integer(), + %% HTTP/2 rapid reset attack protection. + cancel_rate_num :: undefined | pos_integer(), + cancel_rate_time :: undefined | integer(), + %% Flow requested for all streams. flow = 0 :: non_neg_integer(), + %% Dynamic buffer moving average and current buffer size. + dynamic_buffer_size :: pos_integer() | false, + dynamic_buffer_moving_average :: non_neg_integer(), + %% Currently active HTTP/2 streams. Streams may be initiated either %% by the client or by the server through PUSH_PROMISE frames. streams = #{} :: #{cow_http2:streamid() => #stream{}}, @@ -127,11 +154,14 @@ }). -spec init(pid(), ranch:ref(), inet:socket(), module(), - ranch_proxy_header:proxy_info() | undefined, cowboy:opts()) -> ok. + ranch_proxy_header:proxy_info() | undefined, cowboy:opts()) -> no_return(). 
+ init(Parent, Ref, Socket, Transport, ProxyHeader, Opts) -> - Peer0 = Transport:peername(Socket), - Sock0 = Transport:sockname(Socket), - Cert1 = case Transport:name() of + {ok, Peer} = maybe_socket_error(undefined, Transport:peername(Socket), + 'A socket error occurred when retrieving the peer name.'), + {ok, Sock} = maybe_socket_error(undefined, Transport:sockname(Socket), + 'A socket error occurred when retrieving the sock name.'), + CertResult = case Transport:name() of ssl -> case ssl:peercert(Socket) of {error, no_peercert} -> @@ -142,40 +172,37 @@ init(Parent, Ref, Socket, Transport, ProxyHeader, Opts) -> _ -> {ok, undefined} end, - case {Peer0, Sock0, Cert1} of - {{ok, Peer}, {ok, Sock}, {ok, Cert}} -> - init(Parent, Ref, Socket, Transport, ProxyHeader, Opts, Peer, Sock, Cert, <<>>); - {{error, Reason}, _, _} -> - terminate(undefined, {socket_error, Reason, - 'A socket error occurred when retrieving the peer name.'}); - {_, {error, Reason}, _} -> - terminate(undefined, {socket_error, Reason, - 'A socket error occurred when retrieving the sock name.'}); - {_, _, {error, Reason}} -> - terminate(undefined, {socket_error, Reason, - 'A socket error occurred when retrieving the client TLS certificate.'}) - end. + {ok, Cert} = maybe_socket_error(undefined, CertResult, + 'A socket error occurred when retrieving the client TLS certificate.'), + init(Parent, Ref, Socket, Transport, ProxyHeader, Opts, Peer, Sock, Cert, <<>>). -spec init(pid(), ranch:ref(), inet:socket(), module(), ranch_proxy_header:proxy_info() | undefined, cowboy:opts(), {inet:ip_address(), inet:port_number()}, {inet:ip_address(), inet:port_number()}, - binary() | undefined, binary()) -> ok. + binary() | undefined, binary()) -> no_return(). + init(Parent, Ref, Socket, Transport, ProxyHeader, Opts, Peer, Sock, Cert, Buffer) -> + DynamicBuffer = init_dynamic_buffer_size(Opts), {ok, Preface, HTTP2Machine} = cow_http2_machine:init(server, Opts), + %% Send the preface before doing all the init in case we get a socket error. + ok = maybe_socket_error(undefined, Transport:send(Socket, Preface)), State = set_idle_timeout(init_rate_limiting(#state{parent=Parent, ref=Ref, socket=Socket, transport=Transport, proxy_header=ProxyHeader, opts=Opts, peer=Peer, sock=Sock, cert=Cert, - http2_status=sequence, http2_machine=HTTP2Machine})), - Transport:send(Socket, Preface), - setopts_active(State), + dynamic_buffer_size=DynamicBuffer, + dynamic_buffer_moving_average=maps:get(dynamic_buffer_initial_average, Opts, 0), + http2_status=sequence, http2_machine=HTTP2Machine}), 0), + safe_setopts_active(State), case Buffer of - <<>> -> loop(State, Buffer); + <<>> -> before_loop(State, Buffer); _ -> parse(State, Buffer) end. -init_rate_limiting(State) -> +init_rate_limiting(State0) -> CurrentTime = erlang:monotonic_time(millisecond), - init_reset_rate_limiting(init_frame_rate_limiting(State, CurrentTime), CurrentTime). + State1 = init_frame_rate_limiting(State0, CurrentTime), + State2 = init_reset_rate_limiting(State1, CurrentTime), + init_cancel_rate_limiting(State2, CurrentTime). init_frame_rate_limiting(State=#state{opts=Opts}, CurrentTime) -> {FrameRateNum, FrameRatePeriod} = maps:get(max_received_frame_rate, Opts, {10000, 10000}), @@ -189,6 +216,12 @@ init_reset_rate_limiting(State=#state{opts=Opts}, CurrentTime) -> reset_rate_num=ResetRateNum, reset_rate_time=add_period(CurrentTime, ResetRatePeriod) }. 
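Editor's note: the function that follows adds rate limiting for client-cancelled streams, the protection against the HTTP/2 rapid reset flood (CVE-2023-44487). An illustrative sketch of configuring it, with the listener name assumed; the {500, 10000} value mirrors the default used by init_cancel_rate_limiting/2:

	ProtoOpts = #{
		%% Close the connection when 500 streams are cancelled
		%% by the client within a 10 second window.
		max_cancel_stream_rate => {500, 10000}
	},
	{ok, _} = cowboy:start_clear(example_h2c, [{port, 8080}], ProtoOpts).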
+init_cancel_rate_limiting(State=#state{opts=Opts}, CurrentTime) -> + {CancelRateNum, CancelRatePeriod} = maps:get(max_cancel_stream_rate, Opts, {500, 10000}), + State#state{ + cancel_rate_num=CancelRateNum, cancel_rate_time=add_period(CurrentTime, CancelRatePeriod) + }. + add_period(_, infinity) -> infinity; add_period(Time, Period) -> Time + Period. @@ -196,15 +229,19 @@ add_period(Time, Period) -> Time + Period. -spec init(pid(), ranch:ref(), inet:socket(), module(), ranch_proxy_header:proxy_info() | undefined, cowboy:opts(), {inet:ip_address(), inet:port_number()}, {inet:ip_address(), inet:port_number()}, - binary() | undefined, binary(), map() | undefined, cowboy_req:req()) -> ok. + binary() | undefined, binary(), map() | undefined, cowboy_req:req()) -> no_return(). + init(Parent, Ref, Socket, Transport, ProxyHeader, Opts, Peer, Sock, Cert, Buffer, _Settings, Req=#{method := Method}) -> + DynamicBuffer = init_dynamic_buffer_size(Opts), {ok, Preface, HTTP2Machine0} = cow_http2_machine:init(server, Opts), {ok, StreamID, HTTP2Machine} = cow_http2_machine:init_upgrade_stream(Method, HTTP2Machine0), State0 = #state{parent=Parent, ref=Ref, socket=Socket, transport=Transport, proxy_header=ProxyHeader, opts=Opts, peer=Peer, sock=Sock, cert=Cert, + dynamic_buffer_size=DynamicBuffer, + dynamic_buffer_moving_average=maps:get(dynamic_buffer_initial_average, Opts, 0), http2_status=upgrade, http2_machine=HTTP2Machine}, State1 = headers_frame(State0#state{ http2_machine=HTTP2Machine}, StreamID, Req), @@ -214,21 +251,36 @@ init(Parent, Ref, Socket, Transport, ProxyHeader, Opts, Peer, Sock, Cert, Buffer <<"connection">> => <<"Upgrade">>, <<"upgrade">> => <<"h2c">> }, ?MODULE, undefined}), %% @todo undefined or #{}? - State = set_idle_timeout(init_rate_limiting(State2#state{http2_status=sequence})), - Transport:send(Socket, Preface), - setopts_active(State), + State = set_idle_timeout(init_rate_limiting(State2#state{http2_status=sequence}), 0), + %% In the case of HTTP/1.1 Upgrade we cannot send the Preface + %% until we send the 101 response. + ok = maybe_socket_error(State, Transport:send(Socket, Preface)), + safe_setopts_active(State), case Buffer of - <<>> -> loop(State, Buffer); + <<>> -> before_loop(State, Buffer); _ -> parse(State, Buffer) end. +-include("cowboy_dynamic_buffer.hrl"). + %% Because HTTP/2 has flow control and Cowboy has other rate limiting %% mechanisms implemented, a very large active_n value should be fine, %% as long as the stream handlers do their work in a timely manner. +%% However large active_n values reduce the impact of dynamic_buffer. setopts_active(#state{socket=Socket, transport=Transport, opts=Opts}) -> - N = maps:get(active_n, Opts, 100), + N = maps:get(active_n, Opts, 1), Transport:setopts(Socket, [{active, N}]). +safe_setopts_active(State) -> + ok = maybe_socket_error(State, setopts_active(State)). + +before_loop(State=#state{opts=#{hibernate := true}}, Buffer) -> + proc_lib:hibernate(?MODULE, loop, [State, Buffer]); +before_loop(State, Buffer) -> + loop(State, Buffer). + +-spec loop(#state{}, binary()) -> no_return(). + loop(State=#state{parent=Parent, socket=Socket, transport=Transport, opts=Opts, timer=TimerRef, children=Children}, Buffer) -> Messages = Transport:messages(), @@ -236,7 +288,8 @@ loop(State=#state{parent=Parent, socket=Socket, transport=Transport, receive %% Socket messages. 
{OK, Socket, Data} when OK =:= element(1, Messages) -> - parse(set_idle_timeout(State), << Buffer/binary, Data/binary >>); + State1 = maybe_resize_buffer(State, Data), + parse(State1#state{idle_timeout_num=0}, << Buffer/binary, Data/binary >>); {Closed, Socket} when Closed =:= element(2, Messages) -> Reason = case State#state.http2_status of closing -> {stop, closed, 'The client is going away.'}; @@ -248,53 +301,64 @@ loop(State=#state{parent=Parent, socket=Socket, transport=Transport, {Passive, Socket} when Passive =:= element(4, Messages); %% Hardcoded for compatibility with Ranch 1.x. Passive =:= tcp_passive; Passive =:= ssl_passive -> - setopts_active(State), - loop(State, Buffer); + safe_setopts_active(State), + before_loop(State, Buffer); %% System messages. {'EXIT', Parent, shutdown} -> Reason = {stop, {exit, shutdown}, 'Parent process requested shutdown.'}, - loop(initiate_closing(State, Reason), Buffer); + before_loop(initiate_closing(State, Reason), Buffer); {'EXIT', Parent, Reason} -> terminate(State, {stop, {exit, Reason}, 'Parent process terminated.'}); {system, From, Request} -> sys:handle_system_msg(Request, From, Parent, ?MODULE, [], {State, Buffer}); %% Timeouts. {timeout, TimerRef, idle_timeout} -> - terminate(State, {stop, timeout, - 'Connection idle longer than configuration allows.'}); + tick_idle_timeout(State, Buffer); {timeout, Ref, {shutdown, Pid}} -> cowboy_children:shutdown_timeout(Children, Ref, Pid), - loop(State, Buffer); + before_loop(State, Buffer); {timeout, TRef, {cow_http2_machine, Name}} -> - loop(timeout(State, Name, TRef), Buffer); + before_loop(timeout(State, Name, TRef), Buffer); {timeout, TimerRef, {goaway_initial_timeout, Reason}} -> - loop(closing(State, Reason), Buffer); + before_loop(closing(State, Reason), Buffer); {timeout, TimerRef, {goaway_complete_timeout, Reason}} -> terminate(State, {stop, stop_reason(Reason), 'Graceful shutdown timed out.'}); %% Messages pertaining to a stream. {{Pid, StreamID}, Msg} when Pid =:= self() -> - loop(info(State, StreamID, Msg), Buffer); + before_loop(info(State, StreamID, Msg), Buffer); %% Exit signal from children. Msg = {'EXIT', Pid, _} -> - loop(down(State, Pid, Msg), Buffer); + before_loop(down(State, Pid, Msg), Buffer); %% Calls from supervisor module. {'$gen_call', From, Call} -> cowboy_children:handle_supervisor_call(Call, From, Children, ?MODULE), - loop(State, Buffer); + before_loop(State, Buffer); Msg -> cowboy:log(warning, "Received stray message ~p.", [Msg], Opts), - loop(State, Buffer) + before_loop(State, Buffer) after InactivityTimeout -> terminate(State, {internal_error, timeout, 'No message or data received before timeout.'}) end. -set_idle_timeout(State=#state{http2_status=Status, timer=TimerRef}) +tick_idle_timeout(State=#state{idle_timeout_num=?IDLE_TIMEOUT_TICKS}, _) -> + terminate(State, {stop, timeout, + 'Connection idle longer than configuration allows.'}); +tick_idle_timeout(State=#state{idle_timeout_num=TimeoutNum}, Buffer) -> + before_loop(set_idle_timeout(State, TimeoutNum + 1), Buffer). + +set_idle_timeout(State=#state{http2_status=Status, timer=TimerRef}, _) when Status =:= closing_initiated orelse Status =:= closing, TimerRef =/= undefined -> State; -set_idle_timeout(State=#state{opts=Opts}) -> - set_timeout(State, maps:get(idle_timeout, Opts, 60000), idle_timeout). 
+set_idle_timeout(State=#state{opts=Opts}, TimeoutNum) -> + case maps:get(idle_timeout, Opts, 60000) of + infinity -> + State#state{timer=undefined}; + Timeout -> + set_timeout(State#state{idle_timeout_num=TimeoutNum}, + Timeout div ?IDLE_TIMEOUT_TICKS, idle_timeout) + end. set_timeout(State=#state{timer=TimerRef0}, Timeout, Message) -> ok = case TimerRef0 of @@ -307,6 +371,14 @@ set_timeout(State=#state{timer=TimerRef0}, Timeout, Message) -> end, State#state{timer=TimerRef}. +maybe_reset_idle_timeout(State=#state{opts=Opts}) -> + case maps:get(reset_idle_timeout_on_send, Opts, false) of + true -> + State#state{idle_timeout_num=0}; + false -> + State + end. + %% HTTP/2 protocol parsing. parse(State=#state{http2_status=sequence}, Data) -> @@ -314,7 +386,7 @@ parse(State=#state{http2_status=sequence}, Data) -> {ok, Rest} -> parse(State#state{http2_status=settings}, Rest); more -> - loop(State, Data); + before_loop(State, Data); Error = {connection_error, _, _} -> terminate(State, Error) end; @@ -333,7 +405,7 @@ parse(State=#state{http2_status=Status, http2_machine=HTTP2Machine, streams=Stre more when Status =:= closing, Streams =:= #{} -> terminate(State, {stop, normal, 'The connection is going away.'}); more -> - loop(State, Data) + before_loop(State, Data) end. %% Frame rate flood protection. @@ -383,10 +455,11 @@ frame(State=#state{http2_machine=HTTP2Machine0}, Frame) -> goaway(State#state{http2_machine=HTTP2Machine}, GoAway); {send, SendData, HTTP2Machine} -> %% We may need to send an alarm for each of the streams sending data. - lists:foldl( + State1 = lists:foldl( fun({StreamID, _, _}, S) -> maybe_send_data_alarm(S, HTTP2Machine0, StreamID) end, send_data(maybe_ack(State#state{http2_machine=HTTP2Machine}, Frame), SendData, []), - SendData); + SendData), + maybe_reset_idle_timeout(State1); {error, {stream_error, StreamID, Reason, Human}, HTTP2Machine} -> reset_stream(State#state{http2_machine=HTTP2Machine}, StreamID, {stream_error, Reason, Human}); @@ -398,15 +471,20 @@ frame(State=#state{http2_machine=HTTP2Machine0}, Frame) -> %% if we were still waiting for a SETTINGS frame. maybe_ack(State=#state{http2_status=settings}, Frame) -> maybe_ack(State#state{http2_status=connected}, Frame); +%% We do not reset the idle timeout on send here because we are +%% sending data as a consequence of receiving data, which means +%% we already resetted the idle timeout. maybe_ack(State=#state{socket=Socket, transport=Transport}, Frame) -> case Frame of - {settings, _} -> Transport:send(Socket, cow_http2:settings_ack()); - {ping, Opaque} -> Transport:send(Socket, cow_http2:ping_ack(Opaque)); + {settings, _} -> + ok = maybe_socket_error(State, Transport:send(Socket, cow_http2:settings_ack())); + {ping, Opaque} -> + ok = maybe_socket_error(State, Transport:send(Socket, cow_http2:ping_ack(Opaque))); _ -> ok end, State. -data_frame(State0=#state{opts=Opts, flow=Flow, streams=Streams}, StreamID, IsFin, Data) -> +data_frame(State0=#state{opts=Opts, flow=Flow0, streams=Streams}, StreamID, IsFin, Data) -> case Streams of #{StreamID := Stream=#stream{status=running, flow=StreamFlow, state=StreamState0}} -> try cowboy_stream:data(StreamID, IsFin, Data, StreamState0) of @@ -415,11 +493,26 @@ data_frame(State0=#state{opts=Opts, flow=Flow, streams=Streams}, StreamID, IsFin %% We may receive more data than we requested. We ensure %% that the flow value doesn't go lower than 0. 
 					Size = byte_size(Data),
-					State = update_window(State0#state{flow=max(0, Flow - Size),
+					Flow = max(0, Flow0 - Size),
+					%% We would normally update the window when changing the flow
+					%% value. But the commands we are about to run may themselves
+					%% update the window, and we want to avoid updating it twice
+					%% in a row. So we first run the commands and only update the
+					%% window afterwards, when no flow command was executed: we can
+					%% tell because a flow command changes the flow value in the state.
+					State1 = State0#state{flow=Flow,
 						streams=Streams#{StreamID => Stream#stream{
 							flow=max(0, StreamFlow - Size), state=StreamState}}},
-						StreamID),
-					commands(State, StreamID, Commands)
+					State = commands(State1, StreamID, Commands),
+					case State of
+						%% No flow command was executed. We must update the window
+						%% because we changed the flow value earlier.
+						#state{flow=Flow} ->
+							update_window(State, StreamID);
+						%% Otherwise the window was updated already.
+						_ ->
+							State
+					end
 			catch Class:Exception:Stacktrace ->
 				cowboy:log(cowboy_stream:make_error_log(data,
 					[StreamID, IsFin, Data, StreamState0],
@@ -568,11 +661,27 @@ rst_stream_frame(State=#state{streams=Streams0, children=Children0}, StreamID, R
 		{#stream{state=StreamState}, Streams} ->
 			terminate_stream_handler(State, StreamID, Reason, StreamState),
 			Children = cowboy_children:shutdown(Children0, StreamID),
-			State#state{streams=Streams, children=Children};
+			cancel_rate_limit(State#state{streams=Streams, children=Children});
 		error ->
 			State
 	end.
 
+cancel_rate_limit(State0=#state{cancel_rate_num=Num0, cancel_rate_time=Time}) ->
+	case Num0 - 1 of
+		0 ->
+			CurrentTime = erlang:monotonic_time(millisecond),
+			if
+				CurrentTime < Time ->
+					terminate(State0, {connection_error, enhance_your_calm,
+						'Stream cancel rate larger than configuration allows. Flood? (CVE-2023-44487)'});
+				true ->
+					%% When the option has a period of infinity we cannot reach this clause.
+					init_cancel_rate_limiting(State0, CurrentTime)
+			end;
+		Num ->
+			State0#state{cancel_rate_num=Num}
+	end.
+
 ignored_frame(State=#state{http2_machine=HTTP2Machine0}) ->
 	case cow_http2_machine:ignored_frame(HTTP2Machine0) of
 		{ok, HTTP2Machine} ->
@@ -657,23 +766,37 @@ commands(State=#state{http2_machine=HTTP2Machine}, StreamID,
 	end;
 %% Send an informational response.
 commands(State0, StreamID, [{inform, StatusCode, Headers}|Tail]) ->
-	State = send_headers(State0, StreamID, idle, StatusCode, Headers),
+	State1 = send_headers(State0, StreamID, idle, StatusCode, Headers),
+	State = maybe_reset_idle_timeout(State1),
 	commands(State, StreamID, Tail);
 %% Send response headers.
 commands(State0, StreamID, [{response, StatusCode, Headers, Body}|Tail]) ->
-	State = send_response(State0, StreamID, StatusCode, Headers, Body),
+	State1 = send_response(State0, StreamID, StatusCode, Headers, Body),
+	State = maybe_reset_idle_timeout(State1),
 	commands(State, StreamID, Tail);
 %% Send response headers.
 commands(State0, StreamID, [{headers, StatusCode, Headers}|Tail]) ->
-	State = send_headers(State0, StreamID, nofin, StatusCode, Headers),
+	State1 = send_headers(State0, StreamID, nofin, StatusCode, Headers),
+	State = maybe_reset_idle_timeout(State1),
 	commands(State, StreamID, Tail);
 %% Send a response body chunk. 
commands(State0, StreamID, [{data, IsFin, Data}|Tail]) -> - State = maybe_send_data(State0, StreamID, IsFin, Data, []), + State = case maybe_send_data(State0, StreamID, IsFin, Data, []) of + {data_sent, State1} -> + maybe_reset_idle_timeout(State1); + {no_data_sent, State1} -> + State1 + end, commands(State, StreamID, Tail); %% Send trailers. commands(State0, StreamID, [{trailers, Trailers}|Tail]) -> - State = maybe_send_data(State0, StreamID, fin, {trailers, maps:to_list(Trailers)}, []), + State = case maybe_send_data(State0, StreamID, fin, + {trailers, maps:to_list(Trailers)}, []) of + {data_sent, State1} -> + maybe_reset_idle_timeout(State1); + {no_data_sent, State1} -> + State1 + end, commands(State, StreamID, Tail); %% Send a push promise. %% @@ -705,10 +828,11 @@ commands(State0=#state{socket=Socket, transport=Transport, http2_machine=HTTP2Ma State = case cow_http2_machine:prepare_push_promise(StreamID, HTTP2Machine0, PseudoHeaders, Headers) of {ok, PromisedStreamID, HeaderBlock, HTTP2Machine} -> - Transport:send(Socket, cow_http2:push_promise( - StreamID, PromisedStreamID, HeaderBlock)), - headers_frame(State0#state{http2_machine=HTTP2Machine}, - PromisedStreamID, fin, Headers, PseudoHeaders, 0); + State1 = State0#state{http2_machine=HTTP2Machine}, + ok = maybe_socket_error(State1, Transport:send(Socket, + cow_http2:push_promise(StreamID, PromisedStreamID, HeaderBlock))), + State2 = maybe_reset_idle_timeout(State1), + headers_frame(State2, PromisedStreamID, fin, Headers, PseudoHeaders, 0); {error, no_push} -> State0 end, @@ -731,10 +855,14 @@ commands(State, StreamID, [Error = {internal_error, _, _}|_Tail]) -> %% @todo Only reset when the stream still exists. reset_stream(State, StreamID, Error); %% Upgrade to HTTP/2. This is triggered by cowboy_http2 itself. +%% +%% We do not need to reset the idle timeout on send because it +%% hasn't been set yet. This is called from init/12. commands(State=#state{socket=Socket, transport=Transport, http2_status=upgrade}, StreamID, [{switch_protocol, Headers, ?MODULE, _}|Tail]) -> %% @todo This 101 response needs to be passed through stream handlers. - Transport:send(Socket, cow_http:response(101, 'HTTP/1.1', maps:to_list(Headers))), + ok = maybe_socket_error(State, Transport:send(Socket, + cow_http:response(101, 'HTTP/1.1', maps:to_list(Headers)))), commands(State, StreamID, Tail); %% Use a different protocol within the stream (CONNECT :protocol). %% @todo Make sure we error out when the feature is disabled. @@ -755,22 +883,32 @@ commands(State=#state{opts=Opts}, StreamID, [Log={log, _, _, _}|Tail]) -> %% Tentatively update the window after the flow was updated. 
-update_window(State=#state{socket=Socket, transport=Transport, +update_window(State0=#state{socket=Socket, transport=Transport, http2_machine=HTTP2Machine0, flow=Flow, streams=Streams}, StreamID) -> - #{StreamID := #stream{flow=StreamFlow}} = Streams, {Data1, HTTP2Machine2} = case cow_http2_machine:ensure_window(Flow, HTTP2Machine0) of ok -> {<<>>, HTTP2Machine0}; {ok, Increment1, HTTP2Machine1} -> {cow_http2:window_update(Increment1), HTTP2Machine1} end, - {Data2, HTTP2Machine} = case cow_http2_machine:ensure_window(StreamID, StreamFlow, HTTP2Machine2) of - ok -> {<<>>, HTTP2Machine2}; - {ok, Increment2, HTTP2Machine3} -> {cow_http2:window_update(StreamID, Increment2), HTTP2Machine3} + {Data2, HTTP2Machine} = case Streams of + #{StreamID := #stream{flow=StreamFlow}} -> + case cow_http2_machine:ensure_window(StreamID, StreamFlow, HTTP2Machine2) of + ok -> + {<<>>, HTTP2Machine2}; + {ok, Increment2, HTTP2Machine3} -> + {cow_http2:window_update(StreamID, Increment2), HTTP2Machine3} + end; + _ -> + %% Don't update the stream's window if it stopped. + {<<>>, HTTP2Machine2} end, + State = State0#state{http2_machine=HTTP2Machine}, case {Data1, Data2} of - {<<>>, <<>>} -> ok; - _ -> Transport:send(Socket, [Data1, Data2]) - end, - State#state{http2_machine=HTTP2Machine}. + {<<>>, <<>>} -> + State; + _ -> + ok = maybe_socket_error(State, Transport:send(Socket, [Data1, Data2])), + maybe_reset_idle_timeout(State) + end. %% Send the response, trailers or data. @@ -790,18 +928,21 @@ send_response(State0=#state{http2_machine=HTTP2Machine0}, StreamID, StatusCode, = cow_http2_machine:prepare_headers(StreamID, HTTP2Machine0, nofin, #{status => cow_http:status_to_integer(StatusCode)}, headers_to_list(Headers)), - maybe_send_data(State0#state{http2_machine=HTTP2Machine}, StreamID, fin, Body, - [cow_http2:headers(StreamID, nofin, HeaderBlock)]) + {_, State} = maybe_send_data(State0#state{http2_machine=HTTP2Machine}, + StreamID, fin, Body, [cow_http2:headers(StreamID, nofin, HeaderBlock)]), + State end. -send_headers(State=#state{socket=Socket, transport=Transport, +send_headers(State0=#state{socket=Socket, transport=Transport, http2_machine=HTTP2Machine0}, StreamID, IsFin0, StatusCode, Headers) -> {ok, IsFin, HeaderBlock, HTTP2Machine} = cow_http2_machine:prepare_headers(StreamID, HTTP2Machine0, IsFin0, #{status => cow_http:status_to_integer(StatusCode)}, headers_to_list(Headers)), - Transport:send(Socket, cow_http2:headers(StreamID, IsFin, HeaderBlock)), - State#state{http2_machine=HTTP2Machine}. + State = State0#state{http2_machine=HTTP2Machine}, + ok = maybe_socket_error(State, Transport:send(Socket, + cow_http2:headers(StreamID, IsFin, HeaderBlock))), + State. %% The set-cookie header is special; we can only send one cookie per header. headers_to_list(Headers0=#{<<"set-cookie">> := SetCookies}) -> @@ -818,13 +959,18 @@ maybe_send_data(State0=#state{socket=Socket, transport=Transport, end, case cow_http2_machine:send_or_queue_data(StreamID, HTTP2Machine0, IsFin, Data) of {ok, HTTP2Machine} -> + State1 = State0#state{http2_machine=HTTP2Machine}, %% If we have prefix data (like a HEADERS frame) we need to send it %% even if we do not send any DATA frames. 
- case Prefix of - [] -> ok; - _ -> Transport:send(Socket, Prefix) + WasDataSent = case Prefix of + [] -> + no_data_sent; + _ -> + ok = maybe_socket_error(State1, Transport:send(Socket, Prefix)), + data_sent end, - maybe_send_data_alarm(State0#state{http2_machine=HTTP2Machine}, HTTP2Machine0, StreamID); + State = maybe_send_data_alarm(State1, HTTP2Machine0, StreamID), + {WasDataSent, State}; {send, SendData, HTTP2Machine} -> State = #state{http2_status=Status, streams=Streams} = send_data(State0#state{http2_machine=HTTP2Machine}, SendData, Prefix), @@ -833,7 +979,7 @@ maybe_send_data(State0=#state{socket=Socket, transport=Transport, Status =:= closing, Streams =:= #{} -> terminate(State, {stop, normal, 'The connection is going away.'}); true -> - maybe_send_data_alarm(State, HTTP2Machine0, StreamID) + {data_sent, maybe_send_data_alarm(State, HTTP2Machine0, StreamID)} end end. @@ -842,14 +988,23 @@ send_data(State0=#state{socket=Socket, transport=Transport, opts=Opts}, SendData _ = [case Data of {sendfile, Offset, Bytes, Path} -> %% When sendfile is disabled we explicitly use the fallback. - _ = case maps:get(sendfile, Opts, true) of - true -> Transport:sendfile(Socket, Path, Offset, Bytes); - false -> ranch_transport:sendfile(Transport, Socket, Path, Offset, Bytes, []) - end; + {ok, _} = maybe_socket_error(State, + case maps:get(sendfile, Opts, true) of + true -> Transport:sendfile(Socket, Path, Offset, Bytes); + false -> ranch_transport:sendfile(Transport, Socket, Path, Offset, Bytes, []) + end + ), + ok; _ -> - Transport:send(Socket, Data) + ok = maybe_socket_error(State, Transport:send(Socket, Data)) end || Data <- Acc], - State. + send_data_terminate(State, SendData). + +send_data_terminate(State, []) -> + State; +send_data_terminate(State0, [{StreamID, IsFin, _}|Tail]) -> + State = maybe_terminate_stream(State0, StreamID, IsFin), + send_data_terminate(State, Tail). prepare_data(State, [], Acc, []) -> {lists:reverse(Acc), State}; @@ -859,8 +1014,7 @@ prepare_data(State0, [{StreamID, IsFin, SendData}|Tail], Acc0, Buffer0) -> {Acc, Buffer, State} = prepare_data(State0, StreamID, IsFin, SendData, Acc0, Buffer0), prepare_data(State, Tail, Acc, Buffer). -prepare_data(State0, StreamID, IsFin, [], Acc, Buffer) -> - State = maybe_terminate_stream(State0, StreamID, IsFin), +prepare_data(State, _, _, [], Acc, Buffer) -> {Acc, Buffer, State}; prepare_data(State0, StreamID, IsFin, [FrameData|Tail], Acc, Buffer) -> FrameIsFin = case Tail of @@ -941,22 +1095,26 @@ stream_alarm(State, StreamID, Name, Value) -> %% We may have to cancel streams even if we receive multiple %% GOAWAY frames as the LastStreamID value may be lower than %% the one previously received. +%% +%% We do not reset the idle timeout on send here. We already +%% disabled it if we initiated shutdown; and we already reset +%% it if the client sent a GOAWAY frame. 
goaway(State0=#state{socket=Socket, transport=Transport, http2_machine=HTTP2Machine0, http2_status=Status, streams=Streams0}, {goaway, LastStreamID, Reason, _}) when Status =:= connected; Status =:= closing_initiated; Status =:= closing -> Streams = goaway_streams(State0, maps:to_list(Streams0), LastStreamID, {stop, {goaway, Reason}, 'The connection is going away.'}, []), - State = State0#state{streams=maps:from_list(Streams)}, + State1 = State0#state{streams=maps:from_list(Streams)}, if Status =:= connected; Status =:= closing_initiated -> {OurLastStreamID, HTTP2Machine} = cow_http2_machine:set_last_streamid(HTTP2Machine0), - Transport:send(Socket, cow_http2:goaway( - OurLastStreamID, no_error, <<>>)), - State#state{http2_status=closing, - http2_machine=HTTP2Machine}; + State = State1#state{http2_status=closing, http2_machine=HTTP2Machine}, + ok = maybe_socket_error(State, Transport:send(Socket, + cow_http2:goaway(OurLastStreamID, no_error, <<>>))), + State; true -> - State + State1 end; %% We terminate the connection immediately if it hasn't fully been initialized. goaway(State, {goaway, _, Reason, _}) -> @@ -979,10 +1137,13 @@ goaway_streams(State, [Stream|Tail], LastStreamID, Reason, Acc) -> %% in-flight stream creation (at least one round-trip time), the server can send %% another GOAWAY frame with an updated last stream identifier. This ensures %% that a connection can be cleanly shut down without losing requests. + -spec initiate_closing(#state{}, _) -> #state{}. + initiate_closing(State=#state{http2_status=connected, socket=Socket, transport=Transport, opts=Opts}, Reason) -> - Transport:send(Socket, cow_http2:goaway(16#7fffffff, no_error, <<>>)), + ok = maybe_socket_error(State, Transport:send(Socket, + cow_http2:goaway(16#7fffffff, no_error, <<>>))), Timeout = maps:get(goaway_initial_timeout, Opts, 1000), Message = {goaway_initial_timeout, Reason}, set_timeout(State#state{http2_status=closing_initiated}, Timeout, Message); @@ -995,17 +1156,21 @@ initiate_closing(State, Reason) -> terminate(State, {stop, stop_reason(Reason), 'The connection is going away.'}). %% Switch to 'closing' state and stop accepting new streams. + -spec closing(#state{}, Reason :: term()) -> #state{}. + closing(State=#state{streams=Streams}, Reason) when Streams =:= #{} -> terminate(State, Reason); -closing(State=#state{http2_status=closing_initiated, +closing(State0=#state{http2_status=closing_initiated, http2_machine=HTTP2Machine0, socket=Socket, transport=Transport}, Reason) -> %% Stop accepting new streams. {LastStreamID, HTTP2Machine} = cow_http2_machine:set_last_streamid(HTTP2Machine0), - Transport:send(Socket, cow_http2:goaway(LastStreamID, no_error, <<>>)), - closing(State#state{http2_status=closing, http2_machine=HTTP2Machine}, Reason); + State = State0#state{http2_status=closing, http2_machine=HTTP2Machine}, + ok = maybe_socket_error(State, Transport:send(Socket, + cow_http2:goaway(LastStreamID, no_error, <<>>))), + closing(State, Reason); closing(State=#state{http2_status=closing, opts=Opts}, Reason) -> %% If client sent GOAWAY, we may already be in 'closing' but without the %% goaway complete timeout set. @@ -1016,7 +1181,21 @@ closing(State=#state{http2_status=closing, opts=Opts}, Reason) -> stop_reason({stop, Reason, _}) -> Reason; stop_reason(Reason) -> Reason. --spec terminate(#state{}, _) -> no_return(). +%% Function copied from cowboy_http. 
+maybe_socket_error(State, {error, closed}) -> + terminate(State, {socket_error, closed, 'The socket has been closed.'}); +maybe_socket_error(State, Reason) -> + maybe_socket_error(State, Reason, 'An error has occurred on the socket.'). + +maybe_socket_error(_, Result = ok, _) -> + Result; +maybe_socket_error(_, Result = {ok, _}, _) -> + Result; +maybe_socket_error(State, {error, Reason}, Human) -> + terminate(State, {socket_error, Reason, Human}). + +-spec terminate(#state{} | undefined, _) -> no_return(). + terminate(undefined, Reason) -> exit({shutdown, Reason}); terminate(State=#state{socket=Socket, transport=Transport, http2_status=Status, @@ -1026,7 +1205,8 @@ terminate(State=#state{socket=Socket, transport=Transport, http2_status=Status, %% as debug data in the GOAWAY frame here. Perhaps more. if Status =:= connected; Status =:= closing_initiated -> - Transport:send(Socket, cow_http2:goaway( + %% We are terminating so it's OK if we can't send the GOAWAY anymore. + _ = Transport:send(Socket, cow_http2:goaway( cow_http2_machine:get_last_streamid(HTTP2Machine), terminate_reason(Reason), <<>>)); %% We already sent the GOAWAY frame. @@ -1035,10 +1215,11 @@ terminate(State=#state{socket=Socket, transport=Transport, http2_status=Status, end, terminate_all_streams(State, maps:to_list(Streams), Reason), cowboy_children:terminate(Children), + %% @todo Don't linger on connection errors. terminate_linger(State), exit({shutdown, Reason}); -terminate(#state{socket=Socket, transport=Transport}, Reason) -> - Transport:close(Socket), +%% We are not fully connected so we can just terminate the connection. +terminate(_State, Reason) -> exit({shutdown, Reason}). terminate_reason({connection_error, Reason, _}) -> Reason; @@ -1072,6 +1253,9 @@ terminate_linger(State=#state{socket=Socket, transport=Transport, opts=Opts}) -> terminate_linger_before_loop(State, TimerRef, Messages) -> %% We may already be in active mode when we do this %% but it's OK because we are shutting down anyway. + %% + %% We specially handle the socket error to terminate + %% when an error occurs. case setopts_active(State) of ok -> terminate_linger_loop(State, TimerRef, Messages); @@ -1096,13 +1280,18 @@ terminate_linger_loop(State=#state{socket=Socket}, TimerRef, Messages) -> end. %% @todo Don't send an RST_STREAM if one was already sent. +%% +%% When resetting the stream we are technically sending data +%% on the socket. However due to implementation complexities +%% we do not attempt to reset the idle timeout on send. 
reset_stream(State0=#state{socket=Socket, transport=Transport, http2_machine=HTTP2Machine0}, StreamID, Error) -> Reason = case Error of {internal_error, _, _} -> internal_error; {stream_error, Reason0, _} -> Reason0 end, - Transport:send(Socket, cow_http2:rst_stream(StreamID, Reason)), + ok = maybe_socket_error(State0, Transport:send(Socket, + cow_http2:rst_stream(StreamID, Reason))), State1 = case cow_http2_machine:reset_stream(StreamID, HTTP2Machine0) of {ok, HTTP2Machine} -> terminate_stream(State0#state{http2_machine=HTTP2Machine}, StreamID, Error); @@ -1174,7 +1363,8 @@ terminate_stream(State0=#state{socket=Socket, transport=Transport, http2_machine=HTTP2Machine0}, StreamID) -> State = case cow_http2_machine:get_stream_local_state(StreamID, HTTP2Machine0) of {ok, fin, _} -> - Transport:send(Socket, cow_http2:rst_stream(StreamID, no_error)), + ok = maybe_socket_error(State0, Transport:send(Socket, + cow_http2:rst_stream(StreamID, no_error))), {ok, HTTP2Machine} = cow_http2_machine:reset_stream(StreamID, HTTP2Machine0), State0#state{http2_machine=HTTP2Machine}; {error, closed} -> @@ -1206,15 +1396,18 @@ terminate_stream_handler(#state{opts=Opts}, StreamID, Reason, StreamState) -> %% System callbacks. --spec system_continue(_, _, {#state{}, binary()}) -> ok. +-spec system_continue(_, _, {#state{}, binary()}) -> no_return(). + system_continue(_, _, {State, Buffer}) -> - loop(State, Buffer). + before_loop(State, Buffer). -spec system_terminate(any(), _, _, {#state{}, binary()}) -> no_return(). + system_terminate(Reason0, _, _, {State, Buffer}) -> Reason = {stop, {exit, Reason0}, 'sys:terminate/2,3 was called.'}, - loop(initiate_closing(State, Reason), Buffer). + before_loop(initiate_closing(State, Reason), Buffer). -spec system_code_change(Misc, _, _, _) -> {ok, Misc} when Misc::{#state{}, binary()}. + system_code_change(Misc, _, _, _) -> {ok, Misc}. diff --git a/src/cowboy_http3.erl b/src/cowboy_http3.erl new file mode 100644 index 0000000..9aa6be5 --- /dev/null +++ b/src/cowboy_http3.erl @@ -0,0 +1,1253 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% A key difference between cowboy_http2 and cowboy_http3 +%% is that HTTP/3 streams are QUIC streams and therefore +%% much of the connection state is handled outside of +%% Cowboy. + +-module(cowboy_http3). + +-export([init/4]). + +%% Temporary callback to do sendfile over QUIC. +-export([send/2]). + +%% @todo Graceful shutdown? Linger? Timeouts? Frame rates? PROXY header? 
+-type opts() :: #{
+	compress_buffering => boolean(),
+	compress_threshold => non_neg_integer(),
+	connection_type => worker | supervisor,
+	enable_connect_protocol => boolean(),
+	env => cowboy_middleware:env(),
+	logger => module(),
+	max_decode_blocked_streams => 0..16#3fffffffffffffff,
+	max_decode_table_size => 0..16#3fffffffffffffff,
+	max_encode_blocked_streams => 0..16#3fffffffffffffff,
+	max_encode_table_size => 0..16#3fffffffffffffff,
+	max_ignored_frame_size_received => non_neg_integer() | infinity,
+	metrics_callback => cowboy_metrics_h:metrics_callback(),
+	metrics_req_filter => fun((cowboy_req:req()) -> map()),
+	metrics_resp_headers_filter => fun((cowboy:http_headers()) -> cowboy:http_headers()),
+	middlewares => [module()],
+	shutdown_timeout => timeout(),
+	stream_handlers => [module()],
+	tracer_callback => cowboy_tracer_h:tracer_callback(),
+	tracer_flags => [atom()],
+	tracer_match_specs => cowboy_tracer_h:tracer_match_specs(),
+	%% Open ended because configured stream handlers might add options.
+	_ => _
+}.
+-export_type([opts/0]).
+
+%% HTTP/3 or WebTransport stream.
+%%
+%% WebTransport sessions involve one bidirectional CONNECT stream
+%% that must stay open (and can be used for signaling using the
+%% Capsule Protocol) and an application-defined number of
+%% unidirectional and bidirectional streams, as well as datagrams.
+%%
+%% WebTransport sessions run in the CONNECT request process and
+%% all events related to the session are sent there as messages.
+%% The pid of the process is kept in the state.
+-record(stream, {
+	id :: cow_http3:stream_id(),
+
+	%% Whether the stream is currently in a special state.
+	status :: header | {unidi, control | encoder | decoder}
+		| normal | {data | ignore, non_neg_integer()} | stopping
+		| {webtransport_session, normal | {ignore, non_neg_integer()}}
+		| {webtransport_stream, cow_http3:stream_id()},
+
+	%% Stream buffer.
+	buffer = <<>> :: binary(),
+
+	%% Stream state.
+	state = undefined :: undefined | {module(), any()}
+}).
+
+-record(state, {
+	parent :: pid(),
+	ref :: ranch:ref(),
+	conn :: cowboy_quicer:quicer_connection_handle(),
+	opts = #{} :: opts(),
+
+	%% Remote address and port for the connection.
+	peer = undefined :: {inet:ip_address(), inet:port_number()},
+
+	%% Local address and port for the connection.
+	sock = undefined :: {inet:ip_address(), inet:port_number()},
+
+	%% Client certificate.
+	cert :: undefined | binary(),
+
+	%% HTTP/3 state machine.
+	http3_machine :: cow_http3_machine:http3_machine(),
+
+	%% Specially handled local unidi streams.
+	local_control_id = undefined :: undefined | cow_http3:stream_id(),
+	local_encoder_id = undefined :: undefined | cow_http3:stream_id(),
+	local_decoder_id = undefined :: undefined | cow_http3:stream_id(),
+
+	%% Bidirectional streams used for requests and responses,
+	%% as well as unidirectional streams initiated by the client.
+	streams = #{} :: #{cow_http3:stream_id() => #stream{}},
+
+	%% Lingering streams that were recently reset. We may receive
+	%% pending data or messages for these streams a short while
+	%% after they have been reset.
+	lingering_streams = [] :: [non_neg_integer()],
+
+	%% Streams can spawn zero or more children which are then managed
+	%% by this module if operating as a supervisor.
+	children = cowboy_children:init() :: cowboy_children:children()
+}).
+
+-spec init(pid(), ranch:ref(), cowboy_quicer:quicer_connection_handle(), opts())
+	-> no_return().
+
+init(Parent, Ref, Conn, Opts) ->
+	{ok, SettingsBin, HTTP3Machine0} = cow_http3_machine:init(server, Opts),
+	%% Immediately open a control, encoder and decoder stream.
+	%% @todo An endpoint MAY avoid creating an encoder stream if it will not be used (for example, if its encoder does not wish to use the dynamic table or if the maximum size of the dynamic table permitted by the peer is zero).
+	%% @todo An endpoint MAY avoid creating a decoder stream if its decoder sets the maximum capacity of the dynamic table to zero.
+	{ok, ControlID} = maybe_socket_error(undefined,
+		cowboy_quicer:start_unidi_stream(Conn, [<<0>>, SettingsBin]),
+		'A socket error occurred when opening the control stream.'),
+	{ok, EncoderID} = maybe_socket_error(undefined,
+		cowboy_quicer:start_unidi_stream(Conn, <<2>>),
+		'A socket error occurred when opening the encoder stream.'),
+	{ok, DecoderID} = maybe_socket_error(undefined,
+		cowboy_quicer:start_unidi_stream(Conn, <<3>>),
+		'A socket error occurred when opening the decoder stream.'),
+	%% Set the control, encoder and decoder streams in the machine.
+	HTTP3Machine = cow_http3_machine:init_unidi_local_streams(
+		ControlID, EncoderID, DecoderID, HTTP3Machine0),
+	%% Get the peername/sockname/cert.
+	{ok, Peer} = maybe_socket_error(undefined, cowboy_quicer:peername(Conn),
+		'A socket error occurred when retrieving the peer name.'),
+	{ok, Sock} = maybe_socket_error(undefined, cowboy_quicer:sockname(Conn),
+		'A socket error occurred when retrieving the sock name.'),
+	CertResult = case cowboy_quicer:peercert(Conn) of
+		{error, no_peercert} ->
+			{ok, undefined};
+		Cert0 ->
+			Cert0
+	end,
+	{ok, Cert} = maybe_socket_error(undefined, CertResult,
+		'A socket error occurred when retrieving the client TLS certificate.'),
+	%% Quick! Let's go!
+	loop(#state{parent=Parent, ref=Ref, conn=Conn,
+		opts=Opts, peer=Peer, sock=Sock, cert=Cert,
+		http3_machine=HTTP3Machine, local_control_id=ControlID,
+		local_encoder_id=EncoderID, local_decoder_id=DecoderID}).
+
+loop(State0=#state{opts=Opts, children=Children}) ->
+	receive
+		Msg when element(1, Msg) =:= quic ->
+			handle_quic_msg(State0, Msg);
+		%% Timeouts.
+		{timeout, Ref, {shutdown, Pid}} ->
+			cowboy_children:shutdown_timeout(Children, Ref, Pid),
+			loop(State0);
+		%% Messages pertaining to a stream.
+		{{Pid, StreamID}, Msg} when Pid =:= self() ->
+			loop(info(State0, StreamID, Msg));
+		%% WebTransport commands.
+		{'$webtransport_commands', SessionID, Commands} ->
+			loop(webtransport_commands(State0, SessionID, Commands));
+		%% Exit signal from children.
+		Msg = {'EXIT', Pid, _} ->
+			loop(down(State0, Pid, Msg));
+		Msg ->
+			cowboy:log(warning, "Received stray message ~p.", [Msg], Opts),
+			loop(State0)
+	end.
+
+handle_quic_msg(State0=#state{opts=Opts}, Msg) ->
+	case cowboy_quicer:handle(Msg) of
+		{data, StreamID, IsFin, Data} ->
+			parse(State0, StreamID, Data, IsFin);
+		{datagram, Data} ->
+			parse_datagram(State0, Data);
+		{stream_started, StreamID, StreamType} ->
+			State = stream_new_remote(State0, StreamID, StreamType),
+			loop(State);
+		{stream_closed, StreamID, ErrorCode} ->
+			State = stream_closed(State0, StreamID, ErrorCode),
+			loop(State);
+		{peer_send_shutdown, StreamID} ->
+			State = stream_peer_send_shutdown(State0, StreamID),
+			loop(State);
+		closed ->
+			%% @todo Different error reason if graceful?
+ Reason = {socket_error, closed, 'The socket has been closed.'}, + terminate(State0, Reason); + ok -> + loop(State0); + unknown -> + cowboy:log(warning, "Received unknown QUIC message ~p.", [Msg], Opts), + loop(State0); + {socket_error, Reason} -> + terminate(State0, {socket_error, Reason, + 'An error has occurred on the socket.'}) + end. + +parse(State=#state{opts=Opts}, StreamID, Data, IsFin) -> + case stream_get(State, StreamID) of + Stream=#stream{buffer= <<>>} -> + parse1(State, Stream, Data, IsFin); + Stream=#stream{buffer=Buffer} -> + Stream1 = Stream#stream{buffer= <<>>}, + parse1(stream_store(State, Stream1), + Stream1, <<Buffer/binary, Data/binary>>, IsFin); + %% Pending data for a stream that has been reset. Ignore. + error -> + case is_lingering_stream(State, StreamID) of + true -> + ok; + false -> + %% We avoid logging the data as it could be quite large. + cowboy:log(warning, "Received data for unknown stream ~p.", + [StreamID], Opts) + end, + loop(State) + end. + +parse1(State, Stream=#stream{status=header}, Data, IsFin) -> + parse_unidirectional_stream_header(State, Stream, Data, IsFin); +parse1(State=#state{http3_machine=HTTP3Machine0}, + #stream{status={unidi, Type}, id=StreamID}, Data, IsFin) + when Type =:= encoder; Type =:= decoder -> + case cow_http3_machine:unidi_data(Data, IsFin, StreamID, HTTP3Machine0) of + {ok, Instrs, HTTP3Machine} -> + loop(send_instructions(State#state{http3_machine=HTTP3Machine}, Instrs)); + {error, Error={connection_error, _, _}, HTTP3Machine} -> + terminate(State#state{http3_machine=HTTP3Machine}, Error) + end; +%% @todo Handle when IsFin = fin which must terminate the WT session. +parse1(State=#state{conn=Conn}, Stream=#stream{id=SessionID, status= + {webtransport_session, normal}}, Data, IsFin) -> + case cow_capsule:parse(Data) of + {ok, wt_drain_session, Rest} -> + webtransport_event(State, SessionID, close_initiated), + parse1(State, Stream, Rest, IsFin); + {ok, {wt_close_session, AppCode, AppMsg}, Rest} -> + %% This event will be handled specially and lead + %% to the termination of the session process. + webtransport_event(State, SessionID, {closed, AppCode, AppMsg}), + %% Shutdown the CONNECT stream immediately. + cowboy_quicer:shutdown_stream(Conn, SessionID), + %% @todo Will we receive a {stream_closed,...} after that? + %% If any data is received past that point this is an error. + %% @todo Don't crash, error out properly. + <<>> = Rest, + loop(webtransport_terminate_session(State, Stream)); + more -> + loop(stream_store(State, Stream#stream{buffer=Data})); + %% Ignore unhandled/unknown capsules. + %% @todo Do this when cow_capsule includes some. +% {ok, _, Rest} -> +% parse1(State, Stream, Rest, IsFin); +% {ok, Rest} -> +% parse1(State, Stream, Rest, IsFin); + %% @todo Make the max length configurable? + {skip, Len} when Len =< 8192 -> + loop(stream_store(State, Stream#stream{ + status={webtransport_session, {ignore, Len}}})); + {skip, Len} -> + %% @todo What should be done on capsule error? + error({todo, capsule_too_long, Len}); + error -> + %% @todo What should be done on capsule error? 
+ error({todo, capsule_error, Data}) + end; +parse1(State, Stream=#stream{status= + {webtransport_session, {ignore, Len}}}, Data, IsFin) -> + case Data of + <<_:Len/unit:8, Rest/bits>> -> + parse1(State, Stream#stream{status={webtransport_session, normal}}, Rest, IsFin); + _ -> + loop(stream_store(State, Stream#stream{ + status={webtransport_session, {ignore, Len - byte_size(Data)}}})) + end; +parse1(State, #stream{id=StreamID, status={webtransport_stream, SessionID}}, Data, IsFin) -> + webtransport_event(State, SessionID, {stream_data, StreamID, IsFin, Data}), + %% No need to store the stream again, WT streams don't get changed here. + loop(State); +parse1(State, Stream=#stream{status={data, Len}, id=StreamID}, Data, IsFin) -> + DataLen = byte_size(Data), + if + DataLen < Len -> + %% We don't have the full frame but this is the end of the + %% data we have. So FrameIsFin is equivalent to IsFin here. + loop(frame(State, Stream#stream{status={data, Len - DataLen}}, {data, Data}, IsFin)); + true -> + <<Data1:Len/binary, Rest/bits>> = Data, + FrameIsFin = is_fin(IsFin, Rest), + parse(frame(State, Stream#stream{status=normal}, {data, Data1}, FrameIsFin), + StreamID, Rest, IsFin) + end; +parse1(State, Stream=#stream{status={ignore, Len}, id=StreamID}, Data, IsFin) -> + DataLen = byte_size(Data), + if + DataLen < Len -> + loop(stream_store(State, Stream#stream{status={ignore, Len - DataLen}})); + true -> + <<_:Len/binary, Rest/bits>> = Data, + parse(stream_store(State, Stream#stream{status=normal}), + StreamID, Rest, IsFin) + end; +%% @todo Clause that discards receiving data for stopping streams. +%% We may receive a few more frames after we abort receiving. +parse1(State=#state{opts=Opts}, Stream=#stream{id=StreamID}, Data, IsFin) -> + case cow_http3:parse(Data) of + {ok, Frame, Rest} -> + FrameIsFin = is_fin(IsFin, Rest), + parse(frame(State, Stream, Frame, FrameIsFin), StreamID, Rest, IsFin); + %% The WebTransport stream header is not a real frame. + {webtransport_stream_header, SessionID, Rest} -> + become_webtransport_stream(State, Stream, bidi, SessionID, Rest, IsFin); + {more, Frame = {data, _}, Len} -> + %% We're at the end of the data so FrameIsFin is equivalent to IsFin. + case IsFin of + nofin -> + %% The stream will be stored at the end of processing commands. + loop(frame(State, Stream#stream{status={data, Len}}, Frame, nofin)); + fin -> + terminate(State, {connection_error, h3_frame_error, + 'Last frame on stream was truncated. (RFC9114 7.1)'}) + end; + {more, ignore, Len} -> + %% @todo This setting should be tested. + %% + %% While the default value doesn't warrant doing a streaming ignore + %% (and could work just fine with the 'more' clause), this value + %% is configurable and users may want to set it large. + MaxIgnoredLen = maps:get(max_ignored_frame_size_received, Opts, 16384), + %% We're at the end of the data so FrameIsFin is equivalent to IsFin. + case IsFin of + nofin when Len < MaxIgnoredLen -> + %% We are not processing commands so we must store the stream. + %% We also call ignored_frame here; we will not need to call + %% it again when ignoring the rest of the data. + Stream1 = Stream#stream{status={ignore, Len}}, + State1 = ignored_frame(State, Stream1), + loop(stream_store(State1, Stream1)); + nofin -> + terminate(State, {connection_error, h3_excessive_load, + 'Ignored frame larger than limit. (RFC9114 10.5)'}); + fin -> + terminate(State, {connection_error, h3_frame_error, + 'Last frame on stream was truncated. 
(RFC9114 7.1)'}) + end; + {ignore, Rest} -> + parse(ignored_frame(State, Stream), StreamID, Rest, IsFin); + Error = {connection_error, _, _} -> + terminate(State, Error); + more when Data =:= <<>> -> + %% The buffer was already reset to <<>>. + loop(stream_store(State, Stream)); + more -> + %% We're at the end of the data so FrameIsFin is equivalent to IsFin. + case IsFin of + nofin -> + loop(stream_store(State, Stream#stream{buffer=Data})); + fin -> + terminate(State, {connection_error, h3_frame_error, + 'Last frame on stream was truncated. (RFC9114 7.1)'}) + end + end. + +%% We may receive multiple frames in a single QUIC packet. +%% The FIN flag applies to the QUIC packet, not to the frame. +%% We must therefore only consider the frame to have a FIN +%% flag if there's no data remaining to be read. +is_fin(fin, <<>>) -> fin; +is_fin(_, _) -> nofin. + +parse_unidirectional_stream_header(State0=#state{http3_machine=HTTP3Machine0}, + Stream0=#stream{id=StreamID}, Data, IsFin) -> + case cow_http3:parse_unidi_stream_header(Data) of + {ok, Type, Rest} when Type =:= control; Type =:= encoder; Type =:= decoder -> + case cow_http3_machine:set_unidi_remote_stream_type( + StreamID, Type, HTTP3Machine0) of + {ok, HTTP3Machine} -> + State = State0#state{http3_machine=HTTP3Machine}, + Stream = Stream0#stream{status={unidi, Type}}, + parse(stream_store(State, Stream), StreamID, Rest, IsFin); + {error, Error={connection_error, _, _}, HTTP3Machine} -> + terminate(State0#state{http3_machine=HTTP3Machine}, Error) + end; + %% @todo Perhaps do this in cow_http3_machine directly. + {ok, push, _} -> + terminate(State0, {connection_error, h3_stream_creation_error, + 'Only servers can push. (RFC9114 6.2.2)'}); + {ok, {webtransport, SessionID}, Rest} -> + become_webtransport_stream(State0, Stream0, unidi, SessionID, Rest, IsFin); + %% Unknown stream types must be ignored. We choose to abort the + %% stream instead of reading and discarding the incoming data. + {undefined, _} -> + loop(stream_abort_receive(State0, Stream0, h3_stream_creation_error)); + %% Very unlikely to happen but WebTransport headers may be fragmented + %% as they are more than one byte. The fin flag in this case is an error, + %% but because it happens in WebTransport application data (the Session ID) + %% we only reset the impacted stream and not the entire connection. + more when IsFin =:= fin -> + loop(stream_abort_receive(State0, Stream0, h3_stream_creation_error)); + more -> + loop(stream_store(State0, Stream0#stream{buffer=Data})) + end. + +frame(State=#state{http3_machine=HTTP3Machine0}, + Stream=#stream{id=StreamID}, Frame, IsFin) -> + case cow_http3_machine:frame(Frame, IsFin, StreamID, HTTP3Machine0) of + {ok, HTTP3Machine} -> + State#state{http3_machine=HTTP3Machine}; + {ok, {data, Data}, HTTP3Machine} -> + data_frame(State#state{http3_machine=HTTP3Machine}, Stream, IsFin, Data); + {ok, {headers, Headers, PseudoHeaders, BodyLen}, Instrs, HTTP3Machine} -> + headers_frame(send_instructions(State#state{http3_machine=HTTP3Machine}, Instrs), + Stream, IsFin, Headers, PseudoHeaders, BodyLen); + {ok, {trailers, _Trailers}, Instrs, HTTP3Machine} -> + %% @todo Propagate trailers. 
+ send_instructions(State#state{http3_machine=HTTP3Machine}, Instrs); + {ok, GoAway={goaway, _}, HTTP3Machine} -> + goaway(State#state{http3_machine=HTTP3Machine}, GoAway); + {error, Error={stream_error, _Reason, _Human}, Instrs, HTTP3Machine} -> + State1 = send_instructions(State#state{http3_machine=HTTP3Machine}, Instrs), + reset_stream(State1, Stream, Error); + {error, Error={connection_error, _, _}, HTTP3Machine} -> + terminate(State#state{http3_machine=HTTP3Machine}, Error) + end. + +data_frame(State=#state{opts=Opts}, + Stream=#stream{id=StreamID, state=StreamState0}, IsFin, Data) -> + try cowboy_stream:data(StreamID, IsFin, Data, StreamState0) of + {Commands, StreamState} -> + commands(State, Stream#stream{state=StreamState}, Commands) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(data, + [StreamID, IsFin, Data, StreamState0], + Class, Exception, Stacktrace), Opts), + reset_stream(State, Stream, {internal_error, {Class, Exception}, + 'Unhandled exception in cowboy_stream:data/4.'}) + end. + +headers_frame(State, Stream, IsFin, Headers, + PseudoHeaders=#{method := <<"CONNECT">>}, _) + when map_size(PseudoHeaders) =:= 2 -> + early_error(State, Stream, IsFin, Headers, PseudoHeaders, 501, + 'The CONNECT method is currently not implemented. (RFC7231 4.3.6)'); +headers_frame(State, Stream, IsFin, Headers, + PseudoHeaders=#{method := <<"TRACE">>}, _) -> + early_error(State, Stream, IsFin, Headers, PseudoHeaders, 501, + 'The TRACE method is currently not implemented. (RFC9114 4.4, RFC7231 4.3.8)'); +headers_frame(State, Stream, IsFin, Headers, PseudoHeaders=#{authority := Authority}, BodyLen) -> + headers_frame_parse_host(State, Stream, IsFin, Headers, PseudoHeaders, BodyLen, Authority); +headers_frame(State, Stream, IsFin, Headers, PseudoHeaders, BodyLen) -> + case lists:keyfind(<<"host">>, 1, Headers) of + {_, Authority} -> + headers_frame_parse_host(State, Stream, IsFin, Headers, PseudoHeaders, BodyLen, Authority); + _ -> + reset_stream(State, Stream, {stream_error, h3_message_error, + 'Requests translated from HTTP/1.1 must include a host header. (RFC7540 8.1.2.3, RFC7230 5.4)'}) + end. + +headers_frame_parse_host(State=#state{ref=Ref, peer=Peer, sock=Sock, cert=Cert}, + Stream=#stream{id=StreamID}, IsFin, Headers, + PseudoHeaders=#{method := Method, scheme := Scheme, path := PathWithQs}, + BodyLen, Authority) -> + try cow_http_hd:parse_host(Authority) of + {Host, Port0} -> + Port = ensure_port(Scheme, Port0), + try cow_http:parse_fullpath(PathWithQs) of + {<<>>, _} -> + reset_stream(State, Stream, {stream_error, h3_message_error, + 'The path component must not be empty. (RFC7540 8.1.2.3)'}); + {Path, Qs} -> + Req0 = #{ + ref => Ref, + pid => self(), + streamid => StreamID, + peer => Peer, + sock => Sock, + cert => Cert, + method => Method, + scheme => Scheme, + host => Host, + port => Port, + path => Path, + qs => Qs, + version => 'HTTP/3', + headers => headers_to_map(Headers, #{}), + has_body => IsFin =:= nofin, + body_length => BodyLen + }, + %% We add the protocol information for extended CONNECTs. + Req = case PseudoHeaders of + #{protocol := Protocol} -> Req0#{protocol => Protocol}; + _ -> Req0 + end, + headers_frame(State, Stream, Req) + catch _:_ -> + reset_stream(State, Stream, {stream_error, h3_message_error, + 'The :path pseudo-header is invalid. (RFC7540 8.1.2.3)'}) + end + catch _:_ -> + reset_stream(State, Stream, {stream_error, h3_message_error, + 'The :authority pseudo-header is invalid. (RFC7540 8.1.2.3)'}) + end. 
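+
+%% Illustration (not part of the module): the cowlib parsing functions
+%% used above split the :authority and :path pseudo-headers before the
+%% Req map is built. With hypothetical values:
+%%
+%%   {<<"example.org">>, 8080} = cow_http_hd:parse_host(<<"example.org:8080">>),
+%%   {<<"example.org">>, undefined} = cow_http_hd:parse_host(<<"example.org">>),
+%%   {<<"/items">>, <<"id=1">>} = cow_http:parse_fullpath(<<"/items?id=1">>),
+%%
+%% so a request for https://example.org/items?id=1 ends up with
+%% host => <<"example.org">>, port => 443 (via ensure_port/2 below),
+%% path => <<"/items">> and qs => <<"id=1">> in the Req map.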
+ +%% @todo Copied from cowboy_http2. +%% @todo How to handle "http"? +ensure_port(<<"http">>, undefined) -> 80; +ensure_port(<<"https">>, undefined) -> 443; +ensure_port(_, Port) -> Port. + +%% @todo Copied from cowboy_http2. +%% This function is necessary to properly handle duplicate headers +%% and the special-case cookie header. +headers_to_map([], Acc) -> + Acc; +headers_to_map([{Name, Value}|Tail], Acc0) -> + Acc = case Acc0 of + %% The cookie header does not use proper HTTP header lists. + #{Name := Value0} when Name =:= <<"cookie">> -> + Acc0#{Name => << Value0/binary, "; ", Value/binary >>}; + #{Name := Value0} -> + Acc0#{Name => << Value0/binary, ", ", Value/binary >>}; + _ -> + Acc0#{Name => Value} + end, + headers_to_map(Tail, Acc). + +%% @todo WebTransport CONNECT requests must have extra checks on settings. +%% @todo We may also need to defer them if we didn't get settings. +headers_frame(State=#state{opts=Opts}, Stream=#stream{id=StreamID}, Req) -> + try cowboy_stream:init(StreamID, Req, Opts) of + {Commands, StreamState} -> + commands(State, Stream#stream{state=StreamState}, Commands) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(init, + [StreamID, Req, Opts], + Class, Exception, Stacktrace), Opts), + reset_stream(State, Stream, {internal_error, {Class, Exception}, + 'Unhandled exception in cowboy_stream:init/3.'}) + end. + +early_error(State0=#state{ref=Ref, opts=Opts, peer=Peer}, + Stream=#stream{id=StreamID}, _IsFin, Headers, #{method := Method}, + StatusCode0, HumanReadable) -> + %% We automatically terminate the stream but it is not an error + %% per se (at least not in the first implementation). + Reason = {stream_error, h3_no_error, HumanReadable}, + %% The partial Req is minimal for now. We only have one case + %% where it can be called (when a method is completely disabled). + PartialReq = #{ + ref => Ref, + peer => Peer, + method => Method, + headers => headers_to_map(Headers, #{}) + }, + Resp = {response, StatusCode0, RespHeaders0=#{<<"content-length">> => <<"0">>}, <<>>}, + try cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts) of + {response, StatusCode, RespHeaders, RespBody} -> + send_response(State0, Stream, StatusCode, RespHeaders, RespBody) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(early_error, + [StreamID, Reason, PartialReq, Resp, Opts], + Class, Exception, Stacktrace), Opts), + %% We still need to send an error response, so send what we initially + %% wanted to send. It's better than nothing. + send_headers(State0, Stream, fin, StatusCode0, RespHeaders0) + end. + +%% Datagrams. + +parse_datagram(State, Data0) -> + {SessionID, Data} = cow_http3:parse_datagram(Data0), + case stream_get(State, SessionID) of + #stream{status={webtransport_session, _}} -> + webtransport_event(State, SessionID, {datagram, Data}), + loop(State); + _ -> + error(todo) %% @todo Might be a future WT session or an error. + end. + +%% Erlang messages. + +down(State0=#state{opts=Opts, children=Children0}, Pid, Msg) -> + State = case cowboy_children:down(Children0, Pid) of + %% The stream was terminated already. + {ok, undefined, Children} -> + State0#state{children=Children}; + %% The stream is still running. + {ok, StreamID, Children} -> + info(State0#state{children=Children}, StreamID, Msg); + %% The process was unknown. 
+ error -> + cowboy:log(warning, "Received EXIT signal ~p for unknown process ~p.~n", + [Msg, Pid], Opts), + State0 + end, + if +%% @todo +% State#state.http2_status =:= closing, State#state.streams =:= #{} -> +% terminate(State, {stop, normal, 'The connection is going away.'}); + true -> + State + end. + +info(State=#state{opts=Opts, http3_machine=_HTTP3Machine}, StreamID, Msg) -> + case stream_get(State, StreamID) of + Stream=#stream{state=StreamState0} -> + try cowboy_stream:info(StreamID, Msg, StreamState0) of + {Commands, StreamState} -> + commands(State, Stream#stream{state=StreamState}, Commands) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(info, + [StreamID, Msg, StreamState0], + Class, Exception, Stacktrace), Opts), + reset_stream(State, Stream, {internal_error, {Class, Exception}, + 'Unhandled exception in cowboy_stream:info/3.'}) + end; + error -> + case is_lingering_stream(State, StreamID) of + true -> + ok; + false -> + cowboy:log(warning, "Received message ~p for unknown stream ~p.", + [Msg, StreamID], Opts) + end, + State + end. + +%% Stream handler commands. + +commands(State, Stream, []) -> + stream_store(State, Stream); +%% Error responses are sent only if a response wasn't sent already. +commands(State=#state{http3_machine=HTTP3Machine}, Stream=#stream{id=StreamID}, + [{error_response, StatusCode, Headers, Body}|Tail]) -> + case cow_http3_machine:get_bidi_stream_local_state(StreamID, HTTP3Machine) of + {ok, idle} -> + commands(State, Stream, [{response, StatusCode, Headers, Body}|Tail]); + _ -> + commands(State, Stream, Tail) + end; +%% Send an informational response. +commands(State0, Stream, [{inform, StatusCode, Headers}|Tail]) -> + State = send_headers(State0, Stream, idle, StatusCode, Headers), + commands(State, Stream, Tail); +%% Send response headers. +commands(State0, Stream, [{response, StatusCode, Headers, Body}|Tail]) -> + State = send_response(State0, Stream, StatusCode, Headers, Body), + commands(State, Stream, Tail); +%% Send response headers. +commands(State0, Stream, [{headers, StatusCode, Headers}|Tail]) -> + State = send_headers(State0, Stream, nofin, StatusCode, Headers), + commands(State, Stream, Tail); +%%% Send a response body chunk. +commands(State0=#state{conn=Conn}, Stream=#stream{id=StreamID}, [{data, IsFin, Data}|Tail]) -> + _ = case Data of + {sendfile, Offset, Bytes, Path} -> + %% Temporary solution to do sendfile over QUIC. + {ok, _} = ranch_transport:sendfile(?MODULE, {Conn, StreamID}, + Path, Offset, Bytes, []), + ok = maybe_socket_error(State0, + cowboy_quicer:send(Conn, StreamID, cow_http3:data(<<>>), IsFin)); + _ -> + ok = maybe_socket_error(State0, + cowboy_quicer:send(Conn, StreamID, cow_http3:data(Data), IsFin)) + end, + State = maybe_send_is_fin(State0, Stream, IsFin), + commands(State, Stream, Tail); +%%% Send trailers. 
+commands(State0=#state{conn=Conn, http3_machine=HTTP3Machine0}, + Stream=#stream{id=StreamID}, [{trailers, Trailers}|Tail]) -> + State = case cow_http3_machine:prepare_trailers( + StreamID, HTTP3Machine0, maps:to_list(Trailers)) of + {trailers, HeaderBlock, Instrs, HTTP3Machine} -> + State1 = send_instructions(State0#state{http3_machine=HTTP3Machine}, Instrs), + ok = maybe_socket_error(State1, + cowboy_quicer:send(Conn, StreamID, cow_http3:headers(HeaderBlock), fin)), + State1; + {no_trailers, HTTP3Machine} -> + ok = maybe_socket_error(State0, + cowboy_quicer:send(Conn, StreamID, cow_http3:data(<<>>), fin)), + State0#state{http3_machine=HTTP3Machine} + end, + commands(State, Stream, Tail); +%% Send a push promise. +%% +%% @todo Responses sent as a result of a push_promise request +%% must not send push_promise frames themselves. +%% +%% @todo We should not send push_promise frames when we are +%% in the closing http2_status. +%commands(State0=#state{socket=Socket, transport=Transport, http3_machine=HTTP3Machine0}, +% Stream, [{push, Method, Scheme, Host, Port, Path, Qs, Headers0}|Tail]) -> +% Authority = case {Scheme, Port} of +% {<<"http">>, 80} -> Host; +% {<<"https">>, 443} -> Host; +% _ -> iolist_to_binary([Host, $:, integer_to_binary(Port)]) +% end, +% PathWithQs = iolist_to_binary(case Qs of +% <<>> -> Path; +% _ -> [Path, $?, Qs] +% end), +% PseudoHeaders = #{ +% method => Method, +% scheme => Scheme, +% authority => Authority, +% path => PathWithQs +% }, +% %% We need to make sure the header value is binary before we can +% %% create the Req object, as it expects them to be flat. +% Headers = maps:to_list(maps:map(fun(_, V) -> iolist_to_binary(V) end, Headers0)), +% %% @todo +% State = case cow_http2_machine:prepare_push_promise(StreamID, HTTP3Machine0, +% PseudoHeaders, Headers) of +% {ok, PromisedStreamID, HeaderBlock, HTTP3Machine} -> +% Transport:send(Socket, cow_http2:push_promise( +% StreamID, PromisedStreamID, HeaderBlock)), +% headers_frame(State0#state{http3_machine=HTTP2Machine}, +% PromisedStreamID, fin, Headers, PseudoHeaders, 0); +% {error, no_push} -> +% State0 +% end, +% commands(State, Stream, Tail); +%%% Read the request body. +%commands(State0=#state{flow=Flow, streams=Streams}, Stream, [{flow, Size}|Tail]) -> +commands(State, Stream, [{flow, _Size}|Tail]) -> + %% @todo We should tell the QUIC stream to increase its window size. +% #{StreamID := Stream=#stream{flow=StreamFlow}} = Streams, +% State = update_window(State0#state{flow=Flow + Size, +% streams=Streams#{StreamID => Stream#stream{flow=StreamFlow + Size}}}, +% StreamID), + commands(State, Stream, Tail); +%% Supervise a child process. +commands(State=#state{children=Children}, Stream=#stream{id=StreamID}, + [{spawn, Pid, Shutdown}|Tail]) -> + commands(State#state{children=cowboy_children:up(Children, Pid, StreamID, Shutdown)}, + Stream, Tail); +%% Error handling. +commands(State, Stream, [Error = {internal_error, _, _}|_Tail]) -> + %% @todo Do we want to run the commands after an internal_error? + %% @todo Do we even allow commands after? + %% @todo Only reset when the stream still exists. + reset_stream(State, Stream, Error); +%% Use a different protocol within the stream (CONNECT :protocol). +%% @todo Make sure we error out when the feature is disabled. 
+commands(State0, Stream0=#stream{id=StreamID}, + [{switch_protocol, Headers, cowboy_webtransport, WTState=#{}}|Tail]) -> + State = info(stream_store(State0, Stream0), StreamID, {headers, 200, Headers}), + #state{http3_machine=HTTP3Machine0} = State, + Stream1 = #stream{state=StreamState} = stream_get(State, StreamID), + %% The stream becomes a WT session at that point. It is the + %% parent stream of all streams in this WT session. The + %% cowboy_stream state is kept because it will be needed + %% to terminate the stream properly. + HTTP3Machine = cow_http3_machine:become_webtransport_session(StreamID, HTTP3Machine0), + Stream = Stream1#stream{ + status={webtransport_session, normal}, + state={cowboy_webtransport, WTState#{stream_state => StreamState}} + }, + %% @todo We must propagate the buffer to capsule handling if any. + commands(State#state{http3_machine=HTTP3Machine}, Stream, Tail); +commands(State0, Stream0=#stream{id=StreamID}, + [{switch_protocol, Headers, _Mod, _ModState}|Tail]) -> + State = info(stream_store(State0, Stream0), StreamID, {headers, 200, Headers}), + Stream = stream_get(State, StreamID), + commands(State, Stream, Tail); +%% Set options dynamically. +commands(State, Stream, [{set_options, _Opts}|Tail]) -> + commands(State, Stream, Tail); +commands(State, Stream, [stop|_Tail]) -> + %% @todo Do we want to run the commands after a stop? + %% @todo Do we even allow commands after? + stop_stream(State, Stream); +%% Log event. +commands(State=#state{opts=Opts}, Stream, [Log={log, _, _, _}|Tail]) -> + cowboy:log(Log, Opts), + commands(State, Stream, Tail). + +send_response(State0=#state{conn=Conn, http3_machine=HTTP3Machine0}, + Stream=#stream{id=StreamID}, StatusCode, Headers, Body) -> + Size = case Body of + {sendfile, _, Bytes0, _} -> Bytes0; + _ -> iolist_size(Body) + end, + case Size of + 0 -> + State = send_headers(State0, Stream, fin, StatusCode, Headers), + maybe_send_is_fin(State, Stream, fin); + _ -> + %% @todo Add a test for HEAD to make sure we don't send the body when + %% returning {response...} from a stream handler (or {headers...} then {data...}). + {ok, _IsFin, HeaderBlock, Instrs, HTTP3Machine} + = cow_http3_machine:prepare_headers(StreamID, HTTP3Machine0, nofin, + #{status => cow_http:status_to_integer(StatusCode)}, + headers_to_list(Headers)), + State = send_instructions(State0#state{http3_machine=HTTP3Machine}, Instrs), + %% @todo It might be better to do async sends. + _ = case Body of + {sendfile, Offset, Bytes, Path} -> + ok = maybe_socket_error(State, + cowboy_quicer:send(Conn, StreamID, cow_http3:headers(HeaderBlock))), + %% Temporary solution to do sendfile over QUIC. + {ok, _} = maybe_socket_error(State, + ranch_transport:sendfile(?MODULE, {Conn, StreamID}, + Path, Offset, Bytes, [])), + ok = maybe_socket_error(State, + cowboy_quicer:send(Conn, StreamID, cow_http3:data(<<>>), fin)); + _ -> + ok = maybe_socket_error(State, + cowboy_quicer:send(Conn, StreamID, [ + cow_http3:headers(HeaderBlock), + cow_http3:data(Body) + ], fin)) + end, + maybe_send_is_fin(State, Stream, fin) + end. + +maybe_send_is_fin(State=#state{http3_machine=HTTP3Machine0}, + Stream=#stream{id=StreamID}, fin) -> + HTTP3Machine = cow_http3_machine:close_bidi_stream_for_sending(StreamID, HTTP3Machine0), + maybe_terminate_stream(State#state{http3_machine=HTTP3Machine}, Stream); +maybe_send_is_fin(State, _, _) -> + State. + +%% Temporary callback to do sendfile over QUIC. 
+-spec send({cowboy_quicer:quicer_connection_handle(), cow_http3:stream_id()}, + iodata()) -> ok | {error, any()}. + +send({Conn, StreamID}, IoData) -> + cowboy_quicer:send(Conn, StreamID, cow_http3:data(IoData)). + +send_headers(State0=#state{conn=Conn, http3_machine=HTTP3Machine0}, + #stream{id=StreamID}, IsFin0, StatusCode, Headers) -> + {ok, IsFin, HeaderBlock, Instrs, HTTP3Machine} + = cow_http3_machine:prepare_headers(StreamID, HTTP3Machine0, IsFin0, + #{status => cow_http:status_to_integer(StatusCode)}, + headers_to_list(Headers)), + State = send_instructions(State0#state{http3_machine=HTTP3Machine}, Instrs), + ok = maybe_socket_error(State, + cowboy_quicer:send(Conn, StreamID, cow_http3:headers(HeaderBlock), IsFin)), + State. + +%% The set-cookie header is special; we can only send one cookie per header. +headers_to_list(Headers0=#{<<"set-cookie">> := SetCookies}) -> + Headers = maps:to_list(maps:remove(<<"set-cookie">>, Headers0)), + Headers ++ [{<<"set-cookie">>, Value} || Value <- SetCookies]; +headers_to_list(Headers) -> + maps:to_list(Headers). + +%% @todo We would open unidi streams here if we only open on-demand. +%% No instructions. +send_instructions(State, undefined) -> + State; +%% Decoder instructions. +send_instructions(State=#state{conn=Conn, local_decoder_id=DecoderID}, + {decoder_instructions, DecData}) -> + ok = maybe_socket_error(State, + cowboy_quicer:send(Conn, DecoderID, DecData)), + State; +%% Encoder instructions. +send_instructions(State=#state{conn=Conn, local_encoder_id=EncoderID}, + {encoder_instructions, EncData}) -> + ok = maybe_socket_error(State, + cowboy_quicer:send(Conn, EncoderID, EncData)), + State. + +%% We mark the stream as being a WebTransport stream +%% and then continue parsing the data as a WebTransport +%% stream. This function is common for incoming unidi +%% and bidi streams. +become_webtransport_stream(State0=#state{http3_machine=HTTP3Machine0}, + Stream0=#stream{id=StreamID}, StreamType, SessionID, Rest, IsFin) -> + case cow_http3_machine:become_webtransport_stream(StreamID, SessionID, HTTP3Machine0) of + {ok, HTTP3Machine} -> + State = State0#state{http3_machine=HTTP3Machine}, + Stream = Stream0#stream{status={webtransport_stream, SessionID}}, + webtransport_event(State, SessionID, {stream_open, StreamID, StreamType}), + %% We don't need to parse the remaining data if there isn't any. + case {Rest, IsFin} of + {<<>>, nofin} -> loop(stream_store(State, Stream)); + _ -> parse(stream_store(State, Stream), StreamID, Rest, IsFin) + end + %% @todo Error conditions. + end. + +webtransport_event(State, SessionID, Event) -> + #stream{ + status={webtransport_session, _}, + state={cowboy_webtransport, #{session_pid := SessionPid}} + } = stream_get(State, SessionID), + SessionPid ! {'$webtransport_event', SessionID, Event}, + ok. + +webtransport_commands(State, SessionID, Commands) -> + case stream_get(State, SessionID) of + Session = #stream{status={webtransport_session, _}} -> + wt_commands(State, Session, Commands); + %% The stream has been terminated, ignore pending commands. + error -> + State + end. + +wt_commands(State, _, []) -> + State; +wt_commands(State0=#state{conn=Conn}, Session=#stream{id=SessionID}, + [{open_stream, OpenStreamRef, StreamType, InitialData}|Tail]) -> + %% Because opening the stream involves sending a short header + %% we necessarily write data. The InitialData variable allows + %% providing additional data to be sent in the same packet. 
+ StartF = case StreamType of + bidi -> start_bidi_stream; + unidi -> start_unidi_stream + end, + Header = cow_http3:webtransport_stream_header(SessionID, StreamType), + case cowboy_quicer:StartF(Conn, [Header, InitialData]) of + {ok, StreamID} -> + %% @todo Pass Session directly? + webtransport_event(State0, SessionID, + {opened_stream_id, OpenStreamRef, StreamID}), + State = stream_new_local(State0, StreamID, StreamType, + {webtransport_stream, SessionID}), + wt_commands(State, Session, Tail) + %% @todo Handle errors. + end; +wt_commands(State, Session, [{close_stream, StreamID, Code}|Tail]) -> + %% @todo Check that StreamID belongs to Session. + error({todo, State, Session, [{close_stream, StreamID, Code}|Tail]}); +wt_commands(State=#state{conn=Conn}, Session=#stream{id=SessionID}, + [{send, datagram, Data}|Tail]) -> + case cowboy_quicer:send_datagram(Conn, cow_http3:datagram(SessionID, Data)) of + ok -> + wt_commands(State, Session, Tail) + %% @todo Handle errors. + end; +wt_commands(State=#state{conn=Conn}, Session, [{send, StreamID, Data}|Tail]) -> + %% @todo Check that StreamID belongs to Session. + case cowboy_quicer:send(Conn, StreamID, Data, nofin) of + ok -> + wt_commands(State, Session, Tail) + %% @todo Handle errors. + end; +wt_commands(State=#state{conn=Conn}, Session, [{send, StreamID, IsFin, Data}|Tail]) -> + %% @todo Check that StreamID belongs to Session. + case cowboy_quicer:send(Conn, StreamID, Data, IsFin) of + ok -> + wt_commands(State, Session, Tail) + %% @todo Handle errors. + end; +wt_commands(State=#state{conn=Conn}, Session=#stream{id=SessionID}, [initiate_close|Tail]) -> + %% We must send a WT_DRAIN_SESSION capsule on the CONNECT stream. + Capsule = cow_capsule:wt_drain_session(), + case cowboy_quicer:send(Conn, SessionID, Capsule, nofin) of + ok -> + wt_commands(State, Session, Tail) + %% @todo Handle errors. + end; +wt_commands(State0=#state{conn=Conn}, Session=#stream{id=SessionID}, [Cmd|Tail]) + when Cmd =:= close; element(1, Cmd) =:= close -> + %% We must send a WT_CLOSE_SESSION capsule on the CONNECT stream. + {AppCode, AppMsg} = case Cmd of + close -> {0, <<>>}; + {close, AppCode0} -> {AppCode0, <<>>}; + {close, AppCode0, AppMsg0} -> {AppCode0, AppMsg0} + end, + Capsule = cow_capsule:wt_close_session(AppCode, AppMsg), + case cowboy_quicer:send(Conn, SessionID, Capsule, fin) of + ok -> + State = webtransport_terminate_session(State0, Session), + %% @todo Because the handler is in a separate process + %% we must wait for it to stop and eventually + %% kill the process if it takes too long. + %% @todo We may need to fully close the CONNECT stream (if remote doesn't reset it). + wt_commands(State, Session, Tail) + %% @todo Handle errors. + end. + +webtransport_terminate_session(State=#state{conn=Conn, http3_machine=HTTP3Machine0, + streams=Streams0, lingering_streams=Lingering0}, #stream{id=SessionID}) -> + %% Reset/abort the WT streams. + Streams = maps:filtermap(fun + (_, #stream{id=StreamID, status={webtransport_session, _}}) + when StreamID =:= SessionID -> + %% We remove the session stream but do the shutdown outside this function. + false; + (StreamID, #stream{status={webtransport_stream, StreamSessionID}}) + when StreamSessionID =:= SessionID -> + cowboy_quicer:shutdown_stream(Conn, StreamID, + both, cow_http3:error_to_code(wt_session_gone)), + false; + (_, _) -> + true + end, Streams0), + %% Keep the streams in lingering state. + %% We only keep up to 100 streams in this state. @todo Make it configurable? 
+ Terminated = maps:keys(Streams0) -- maps:keys(Streams), + Lingering = lists:sublist(Terminated ++ Lingering0, 100), + %% Update the HTTP3 state machine. + HTTP3Machine = cow_http3_machine:close_webtransport_session(SessionID, HTTP3Machine0), + State#state{ + http3_machine=HTTP3Machine, + streams=Streams, + lingering_streams=Lingering + }. + +stream_peer_send_shutdown(State=#state{conn=Conn}, StreamID) -> + case stream_get(State, StreamID) of + %% Cleanly terminating the CONNECT stream is equivalent + %% to an application error code of 0 and empty message. + Stream = #stream{status={webtransport_session, _}} -> + webtransport_event(State, StreamID, {closed, 0, <<>>}), + %% Shutdown the CONNECT stream fully. + cowboy_quicer:shutdown_stream(Conn, StreamID), + webtransport_terminate_session(State, Stream); + _ -> + State + end. + +reset_stream(State0=#state{conn=Conn, http3_machine=HTTP3Machine0}, + Stream=#stream{id=StreamID}, Error) -> + Reason = case Error of + {internal_error, _, _} -> h3_internal_error; + {stream_error, Reason0, _} -> Reason0 + end, + %% @todo Do we want to close both sides? + %% @todo Should we close the send side if the receive side was already closed? + cowboy_quicer:shutdown_stream(Conn, StreamID, + both, cow_http3:error_to_code(Reason)), + State1 = case cow_http3_machine:reset_stream(StreamID, HTTP3Machine0) of + {ok, HTTP3Machine} -> + terminate_stream(State0#state{http3_machine=HTTP3Machine}, Stream, Error); + {error, not_found} -> + terminate_stream(State0, Stream, Error) + end, +%% @todo +% case reset_rate(State1) of +% {ok, State} -> +% State; +% error -> +% terminate(State1, {connection_error, enhance_your_calm, +% 'Stream reset rate larger than configuration allows. Flood? (CVE-2019-9514)'}) +% end. + State1. + +stop_stream(State0=#state{http3_machine=HTTP3Machine}, Stream=#stream{id=StreamID}) -> + %% We abort reading when stopping the stream but only + %% if the client was not finished sending data. + %% We mark the stream as 'stopping' either way. + State = case cow_http3_machine:get_bidi_stream_remote_state(StreamID, HTTP3Machine) of + {ok, fin} -> + stream_store(State0, Stream#stream{status=stopping}); + {error, not_found} -> + stream_store(State0, Stream#stream{status=stopping}); + _ -> + stream_abort_receive(State0, Stream, h3_no_error) + end, + %% Then we may need to send a response or terminate it + %% if the stream handler did not do so already. + case cow_http3_machine:get_bidi_stream_local_state(StreamID, HTTP3Machine) of + %% When the stream terminates normally (without resetting the stream) + %% and no response was sent, we need to send a proper response back to the client. + {ok, idle} -> + info(State, StreamID, {response, 204, #{}, <<>>}); + %% When a response was sent but not terminated, we need to close the stream. + %% We send a final DATA frame to complete the stream. + {ok, nofin} -> + info(State, StreamID, {data, fin, <<>>}); + %% When a response was sent fully we can terminate the stream, + %% regardless of the stream being in half-closed or closed state. + _ -> + terminate_stream(State, Stream, normal) + end. + +maybe_terminate_stream(State, Stream=#stream{status=stopping}) -> + terminate_stream(State, Stream, normal); +%% The Stream will be stored in the State at the end of commands processing. +maybe_terminate_stream(State, _) -> + State. 
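+
+%% For example (a sketch of the observable behaviour; the handler return
+%% values are hypothetical): a stream handler that stops without ever
+%% replying,
+%%
+%%   {[stop], State}
+%%
+%% goes through the {ok, idle} clause of stop_stream/2 above and gets a
+%% 204 response generated on its behalf, while a handler that already
+%% sent
+%%
+%%   {[{headers, 200, #{}}], State}
+%%
+%% and stops later goes through the {ok, nofin} clause, so only a final
+%% empty DATA frame with the FIN flag is sent to close the stream.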
+ +terminate_stream(State=#state{streams=Streams0, children=Children0}, + #stream{id=StreamID, state=StreamState}, Reason) -> + Streams = maps:remove(StreamID, Streams0), + terminate_stream_handler(State, StreamID, Reason, StreamState), + Children = cowboy_children:shutdown(Children0, StreamID), + stream_linger(State#state{streams=Streams, children=Children}, StreamID). + +terminate_stream_handler(#state{opts=Opts}, StreamID, Reason, StreamState) -> + try + cowboy_stream:terminate(StreamID, Reason, StreamState) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(terminate, + [StreamID, Reason, StreamState], + Class, Exception, Stacktrace), Opts) + end. + +ignored_frame(State=#state{http3_machine=HTTP3Machine0}, #stream{id=StreamID}) -> + case cow_http3_machine:ignored_frame(StreamID, HTTP3Machine0) of + {ok, HTTP3Machine} -> + State#state{http3_machine=HTTP3Machine}; + {error, Error={connection_error, _, _}, HTTP3Machine} -> + terminate(State#state{http3_machine=HTTP3Machine}, Error) + end. + +stream_abort_receive(State=#state{conn=Conn}, Stream=#stream{id=StreamID}, Reason) -> + cowboy_quicer:shutdown_stream(Conn, StreamID, + receiving, cow_http3:error_to_code(Reason)), + stream_store(State, Stream#stream{status=stopping}). + +%% @todo Graceful connection shutdown. +%% We terminate the connection immediately if it hasn't fully been initialized. +-spec goaway(#state{}, {goaway, _}) -> no_return(). +goaway(State, {goaway, _}) -> + terminate(State, {stop, goaway, 'The connection is going away.'}). + +%% Function copied from cowboy_http. +maybe_socket_error(State, {error, closed}) -> + terminate(State, {socket_error, closed, 'The socket has been closed.'}); +maybe_socket_error(State, Reason) -> + maybe_socket_error(State, Reason, 'An error has occurred on the socket.'). + +maybe_socket_error(_, Result = ok, _) -> + Result; +maybe_socket_error(_, Result = {ok, _}, _) -> + Result; +maybe_socket_error(State, {error, Reason}, Human) -> + terminate(State, {socket_error, Reason, Human}). + +-spec terminate(#state{} | undefined, _) -> no_return(). +terminate(undefined, Reason) -> + exit({shutdown, Reason}); +terminate(State=#state{conn=Conn, %http3_status=Status, + %http3_machine=HTTP3Machine, + streams=Streams, children=Children}, Reason) -> +% if +% Status =:= connected; Status =:= closing_initiated -> +%% @todo +% %% We are terminating so it's OK if we can't send the GOAWAY anymore. +% _ = cowboy_quicer:send(Conn, ControlID, cow_http3:goaway( +% cow_http3_machine:get_last_streamid(HTTP3Machine))), + %% We already sent the GOAWAY frame. +% Status =:= closing -> +% ok +% end, + terminate_all_streams(State, maps:to_list(Streams), Reason), + cowboy_children:terminate(Children), +% terminate_linger(State), + _ = cowboy_quicer:shutdown(Conn, cow_http3:error_to_code(terminate_reason(Reason))), + exit({shutdown, Reason}). + +terminate_reason({connection_error, Reason, _}) -> Reason; +terminate_reason({stop, _, _}) -> h3_no_error; +terminate_reason({socket_error, _, _}) -> h3_internal_error. +%terminate_reason({internal_error, _, _}) -> internal_error. + +terminate_all_streams(_, [], _) -> + ok; +terminate_all_streams(State, [{StreamID, #stream{state=StreamState}}|Tail], Reason) -> + terminate_stream_handler(State, StreamID, Reason, StreamState), + terminate_all_streams(State, Tail, Reason). + +stream_get(#state{streams=Streams}, StreamID) -> + maps:get(StreamID, Streams, error). 
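+
+%% Informational note: cow_http3:error_to_code/1 translates the atoms
+%% returned by terminate_reason/1 above into the numeric code points
+%% sent on the wire. Assuming cowlib follows RFC 9114 section 8.1:
+%%
+%%   16#0100 = cow_http3:error_to_code(h3_no_error),
+%%   16#0102 = cow_http3:error_to_code(h3_internal_error).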
+ +stream_new_local(State, StreamID, StreamType, Status) -> + stream_new(State, StreamID, StreamType, unidi_local, Status). + +stream_new_remote(State, StreamID, StreamType) -> + Status = case StreamType of + unidi -> header; + bidi -> normal + end, + stream_new(State, StreamID, StreamType, unidi_remote, Status). + +stream_new(State=#state{http3_machine=HTTP3Machine0, streams=Streams}, + StreamID, StreamType, UnidiType, Status) -> + {HTTP3Machine, Status} = case StreamType of + unidi -> + {cow_http3_machine:init_unidi_stream(StreamID, UnidiType, HTTP3Machine0), + Status}; + bidi -> + {cow_http3_machine:init_bidi_stream(StreamID, HTTP3Machine0), + Status} + end, + Stream = #stream{id=StreamID, status=Status}, + State#state{http3_machine=HTTP3Machine, streams=Streams#{StreamID => Stream}}. + +%% Stream closed message for a local (write-only) unidi stream. +stream_closed(State=#state{local_control_id=StreamID}, StreamID, _) -> + stream_closed1(State, StreamID); +stream_closed(State=#state{local_encoder_id=StreamID}, StreamID, _) -> + stream_closed1(State, StreamID); +stream_closed(State=#state{local_decoder_id=StreamID}, StreamID, _) -> + stream_closed1(State, StreamID); +stream_closed(State=#state{opts=Opts, + streams=Streams0, children=Children0}, StreamID, ErrorCode) -> + case maps:take(StreamID, Streams0) of + %% In the WT session's case, streams will be + %% removed in webtransport_terminate_session. + {Stream=#stream{status={webtransport_session, _}}, _} -> + webtransport_event(State, StreamID, closed_abruptly), + webtransport_terminate_session(State, Stream); + {#stream{state=undefined}, Streams} -> + %% Unidi stream has no handler/children. + stream_closed1(State#state{streams=Streams}, StreamID); + %% We only stop bidi streams if the stream was closed with an error + %% or the stream was already in the process of stopping. + {#stream{status=Status, state=StreamState}, Streams} + when Status =:= stopping; ErrorCode =/= 0 -> + terminate_stream_handler(State, StreamID, closed, StreamState), + Children = cowboy_children:shutdown(Children0, StreamID), + stream_closed1(State#state{streams=Streams, children=Children}, StreamID); + %% Don't remove a stream that terminated properly but + %% has chosen to remain up (custom stream handlers). + {_, _} -> + stream_closed1(State, StreamID); + %% Stream closed message for a stream that has been reset. Ignore. + error -> + case is_lingering_stream(State, StreamID) of + true -> + ok; + false -> + %% We avoid logging the data as it could be quite large. + cowboy:log(warning, "Received stream_closed for unknown stream ~p. ~p ~p", + [StreamID, self(), Streams0], Opts) + end, + State + end. + +stream_closed1(State=#state{http3_machine=HTTP3Machine0}, StreamID) -> + case cow_http3_machine:close_stream(StreamID, HTTP3Machine0) of + {ok, HTTP3Machine} -> + State#state{http3_machine=HTTP3Machine}; + {error, Error={connection_error, _, _}, HTTP3Machine} -> + terminate(State#state{http3_machine=HTTP3Machine}, Error) + end. + +stream_store(State=#state{streams=Streams}, Stream=#stream{id=StreamID}) -> + State#state{streams=Streams#{StreamID => Stream}}. + +stream_linger(State=#state{lingering_streams=Lingering0}, StreamID) -> + %% We only keep up to 100 streams in this state. @todo Make it configurable? + Lingering = [StreamID|lists:sublist(Lingering0, 100 - 1)], + State#state{lingering_streams=Lingering}. + +is_lingering_stream(#state{lingering_streams=Lingering}, StreamID) -> + lists:member(StreamID, Lingering). 
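+
+%% Sketch of the lingering bookkeeping above: once a stream has been
+%% terminated its ID is remembered (at most 100 entries) so that late
+%% QUIC data or stray stream messages are dropped quietly instead of
+%% being logged as belonging to an unknown stream:
+%%
+%%   State1 = stream_linger(State0, StreamID),
+%%   true = is_lingering_stream(State1, StreamID).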
diff --git a/src/cowboy_loop.erl b/src/cowboy_loop.erl index 21eb96e..629d06e 100644 --- a/src/cowboy_loop.erl +++ b/src/cowboy_loop.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -17,12 +17,15 @@ -export([upgrade/4]). -export([upgrade/5]). --export([loop/4]). +-export([loop/5]). -export([system_continue/3]). -export([system_terminate/4]). -export([system_code_change/4]). +%% From gen_server. +-define(is_timeout(X), ((X) =:= infinity orelse (is_integer(X) andalso (X) >= 0))). + -callback init(Req, any()) -> {ok | module(), Req, any()} | {module(), Req, any(), any()} @@ -41,40 +44,46 @@ -> {ok, Req, Env} | {suspend, ?MODULE, loop, [any()]} when Req::cowboy_req:req(), Env::cowboy_middleware:env(). upgrade(Req, Env, Handler, HandlerState) -> - loop(Req, Env, Handler, HandlerState). + loop(Req, Env, Handler, HandlerState, infinity). --spec upgrade(Req, Env, module(), any(), hibernate) +-spec upgrade(Req, Env, module(), any(), hibernate | timeout()) -> {suspend, ?MODULE, loop, [any()]} when Req::cowboy_req:req(), Env::cowboy_middleware:env(). upgrade(Req, Env, Handler, HandlerState, hibernate) -> - suspend(Req, Env, Handler, HandlerState). + suspend(Req, Env, Handler, HandlerState); +upgrade(Req, Env, Handler, HandlerState, Timeout) when ?is_timeout(Timeout) -> + loop(Req, Env, Handler, HandlerState, Timeout). --spec loop(Req, Env, module(), any()) +-spec loop(Req, Env, module(), any(), timeout()) -> {ok, Req, Env} | {suspend, ?MODULE, loop, [any()]} when Req::cowboy_req:req(), Env::cowboy_middleware:env(). %% @todo Handle system messages. -loop(Req=#{pid := Parent}, Env, Handler, HandlerState) -> +loop(Req=#{pid := Parent}, Env, Handler, HandlerState, Timeout) -> receive %% System messages. {'EXIT', Parent, Reason} -> terminate(Req, Env, Handler, HandlerState, Reason); {system, From, Request} -> sys:handle_system_msg(Request, From, Parent, ?MODULE, [], - {Req, Env, Handler, HandlerState}); + {Req, Env, Handler, HandlerState, Timeout}); %% Calls from supervisor module. {'$gen_call', From, Call} -> cowboy_children:handle_supervisor_call(Call, From, [], ?MODULE), - loop(Req, Env, Handler, HandlerState); + loop(Req, Env, Handler, HandlerState, Timeout); Message -> - call(Req, Env, Handler, HandlerState, Message) + call(Req, Env, Handler, HandlerState, Timeout, Message) + after Timeout -> + call(Req, Env, Handler, HandlerState, Timeout, timeout) end. -call(Req0, Env, Handler, HandlerState0, Message) -> +call(Req0, Env, Handler, HandlerState0, Timeout, Message) -> try Handler:info(Message, Req0, HandlerState0) of {ok, Req, HandlerState} -> - loop(Req, Env, Handler, HandlerState); + loop(Req, Env, Handler, HandlerState, Timeout); {ok, Req, HandlerState, hibernate} -> suspend(Req, Env, Handler, HandlerState); + {ok, Req, HandlerState, NewTimeout} when ?is_timeout(NewTimeout) -> + loop(Req, Env, Handler, HandlerState, NewTimeout); {stop, Req, HandlerState} -> terminate(Req, Env, Handler, HandlerState, stop) catch Class:Reason:Stacktrace -> @@ -83,7 +92,7 @@ call(Req0, Env, Handler, HandlerState0, Message) -> end. suspend(Req, Env, Handler, HandlerState) -> - {suspend, ?MODULE, loop, [Req, Env, Handler, HandlerState]}. + {suspend, ?MODULE, loop, [Req, Env, Handler, HandlerState, infinity]}. 
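+
+%% Example of the new timeout support (a sketch; the init/2 and info/3
+%% callbacks shown belong to a hypothetical handler module, not to this
+%% patch). Returning a timeout from init/2 instead of hibernate makes
+%% cowboy_loop call info/3 with the atom timeout when no message arrives
+%% in time, and info/3 may return a new timeout as its fourth element:
+%%
+%%   init(Req, State) ->
+%%       {cowboy_loop, Req, State, 5000}.
+%%
+%%   info(timeout, Req, State) ->
+%%       {stop, Req, State};
+%%   info(_Msg, Req, State) ->
+%%       {ok, Req, State, 5000}. %% Arm the timeout again for 5 seconds.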
terminate(Req, Env, Handler, HandlerState, Reason) -> Result = cowboy_handler:terminate(Reason, Req, HandlerState, Handler), @@ -91,15 +100,15 @@ terminate(Req, Env, Handler, HandlerState, Reason) -> %% System callbacks. --spec system_continue(_, _, {Req, Env, module(), any()}) +-spec system_continue(_, _, {Req, Env, module(), any(), timeout()}) -> {ok, Req, Env} | {suspend, ?MODULE, loop, [any()]} when Req::cowboy_req:req(), Env::cowboy_middleware:env(). -system_continue(_, _, {Req, Env, Handler, HandlerState}) -> - loop(Req, Env, Handler, HandlerState). +system_continue(_, _, {Req, Env, Handler, HandlerState, Timeout}) -> + loop(Req, Env, Handler, HandlerState, Timeout). --spec system_terminate(any(), _, _, {Req, Env, module(), any()}) +-spec system_terminate(any(), _, _, {Req, Env, module(), any(), timeout()}) -> {ok, Req, Env} when Req::cowboy_req:req(), Env::cowboy_middleware:env(). -system_terminate(Reason, _, _, {Req, Env, Handler, HandlerState}) -> +system_terminate(Reason, _, _, {Req, Env, Handler, HandlerState, _}) -> terminate(Req, Env, Handler, HandlerState, Reason). -spec system_code_change(Misc, _, _, _) -> {ok, Misc} diff --git a/src/cowboy_metrics_h.erl b/src/cowboy_metrics_h.erl index 4107aac..67bf1a6 100644 --- a/src/cowboy_metrics_h.erl +++ b/src/cowboy_metrics_h.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_middleware.erl b/src/cowboy_middleware.erl index 9a739f1..97c1498 100644 --- a/src/cowboy_middleware.erl +++ b/src/cowboy_middleware.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2013-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_quicer.erl b/src/cowboy_quicer.erl new file mode 100644 index 0000000..aa52fae --- /dev/null +++ b/src/cowboy_quicer.erl @@ -0,0 +1,283 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% QUIC transport using the emqx/quicer NIF. + +-module(cowboy_quicer). + +%% Connection. +-export([peername/1]). +-export([sockname/1]). +-export([peercert/1]). +-export([shutdown/2]). + +%% Streams. +-export([start_bidi_stream/2]). +-export([start_unidi_stream/2]). +-export([send/3]). +-export([send/4]). +-export([send_datagram/2]). +-export([shutdown_stream/2]). +-export([shutdown_stream/4]). + +%% Messages. +-export([handle/1]). + +-ifndef(COWBOY_QUICER). + +-spec peername(_) -> no_return(). +peername(_) -> no_quicer(). + +-spec sockname(_) -> no_return(). 
+sockname(_) -> no_quicer(). + +-spec peercert(_) -> no_return(). +peercert(_) -> no_quicer(). + +-spec shutdown(_, _) -> no_return(). +shutdown(_, _) -> no_quicer(). + +-spec start_bidi_stream(_, _) -> no_return(). +start_bidi_stream(_, _) -> no_quicer(). + +-spec start_unidi_stream(_, _) -> no_return(). +start_unidi_stream(_, _) -> no_quicer(). + +-spec send(_, _, _) -> no_return(). +send(_, _, _) -> no_quicer(). + +-spec send(_, _, _, _) -> no_return(). +send(_, _, _, _) -> no_quicer(). + +-spec send_datagram(_, _) -> no_return(). +send_datagram(_, _) -> no_quicer(). + +-spec shutdown_stream(_, _) -> no_return(). +shutdown_stream(_, _) -> no_quicer(). + +-spec shutdown_stream(_, _, _, _) -> no_return(). +shutdown_stream(_, _, _, _) -> no_quicer(). + +-spec handle(_) -> no_return(). +handle(_) -> no_quicer(). + +no_quicer() -> + error({no_quicer, + "Cowboy must be compiled with environment variable COWBOY_QUICER=1 " + "or with compilation flag -D COWBOY_QUICER=1 in order to enable " + "QUIC support using the emqx/quic NIF"}). + +-else. + +%% @todo Make quicer export these types. +-type quicer_connection_handle() :: reference(). +-export_type([quicer_connection_handle/0]). + +-type quicer_app_errno() :: non_neg_integer(). + +-include_lib("quicer/include/quicer.hrl"). + +%% Connection. + +-spec peername(quicer_connection_handle()) + -> {ok, {inet:ip_address(), inet:port_number()}} + | {error, any()}. + +peername(Conn) -> + quicer:peername(Conn). + +-spec sockname(quicer_connection_handle()) + -> {ok, {inet:ip_address(), inet:port_number()}} + | {error, any()}. + +sockname(Conn) -> + quicer:sockname(Conn). + +-spec peercert(quicer_connection_handle()) + -> {ok, public_key:der_encoded()} + | {error, any()}. + +peercert(Conn) -> + quicer_nif:peercert(Conn). + +-spec shutdown(quicer_connection_handle(), quicer_app_errno()) + -> ok | {error, any()}. + +shutdown(Conn, ErrorCode) -> + quicer:shutdown_connection(Conn, + ?QUIC_CONNECTION_SHUTDOWN_FLAG_NONE, + ErrorCode). + +%% Streams. + +-spec start_bidi_stream(quicer_connection_handle(), iodata()) + -> {ok, cow_http3:stream_id()} + | {error, any()}. + +start_bidi_stream(Conn, InitialData) -> + start_stream(Conn, InitialData, ?QUIC_STREAM_OPEN_FLAG_NONE). + +-spec start_unidi_stream(quicer_connection_handle(), iodata()) + -> {ok, cow_http3:stream_id()} + | {error, any()}. + +start_unidi_stream(Conn, InitialData) -> + start_stream(Conn, InitialData, ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL). + +start_stream(Conn, InitialData, OpenFlag) -> + case quicer:start_stream(Conn, #{ + active => true, + open_flag => OpenFlag}) of + {ok, StreamRef} -> + case quicer:send(StreamRef, InitialData) of + {ok, _} -> + {ok, StreamID} = quicer:get_stream_id(StreamRef), + put({quicer_stream, StreamID}, StreamRef), + {ok, StreamID}; + Error -> + Error + end; + {error, Reason1, Reason2} -> + {error, {Reason1, Reason2}}; + Error -> + Error + end. + +-spec send(quicer_connection_handle(), cow_http3:stream_id(), iodata()) + -> ok | {error, any()}. + +send(Conn, StreamID, Data) -> + send(Conn, StreamID, Data, nofin). + +-spec send(quicer_connection_handle(), cow_http3:stream_id(), iodata(), cow_http:fin()) + -> ok | {error, any()}. + +send(_Conn, StreamID, Data, IsFin) -> + StreamRef = get({quicer_stream, StreamID}), + Size = iolist_size(Data), + case quicer:send(StreamRef, Data, send_flag(IsFin)) of + {ok, Size} -> + ok; + {error, Reason1, Reason2} -> + {error, {Reason1, Reason2}}; + Error -> + Error + end. 
+ +send_flag(nofin) -> ?QUIC_SEND_FLAG_NONE; +send_flag(fin) -> ?QUIC_SEND_FLAG_FIN. + +-spec send_datagram(quicer_connection_handle(), iodata()) + -> ok | {error, any()}. + +send_datagram(Conn, Data) -> + %% @todo Fix/ignore the Dialyzer error instead of doing this. + DataBin = iolist_to_binary(Data), + Size = byte_size(DataBin), + case quicer:send_dgram(Conn, DataBin) of + {ok, Size} -> + ok; + %% @todo Handle error cases. + Error -> + Error + end. + +-spec shutdown_stream(quicer_connection_handle(), cow_http3:stream_id()) + -> ok. + +shutdown_stream(_Conn, StreamID) -> + StreamRef = get({quicer_stream, StreamID}), + _ = quicer:shutdown_stream(StreamRef), + ok. + +-spec shutdown_stream(quicer_connection_handle(), + cow_http3:stream_id(), both | receiving, quicer_app_errno()) + -> ok. + +shutdown_stream(_Conn, StreamID, Dir, ErrorCode) -> + StreamRef = get({quicer_stream, StreamID}), + _ = quicer:shutdown_stream(StreamRef, shutdown_flag(Dir), ErrorCode, infinity), + ok. + +%% @todo Are these flags correct for what we want? +shutdown_flag(both) -> ?QUIC_STREAM_SHUTDOWN_FLAG_ABORT; +shutdown_flag(receiving) -> ?QUIC_STREAM_SHUTDOWN_FLAG_ABORT_RECEIVE. + +%% Messages. + +%% @todo Probably should have the Conn given as argument too? +-spec handle({quic, _, _, _}) + -> {data, cow_http3:stream_id(), cow_http:fin(), binary()} + | {datagram, binary()} + | {stream_started, cow_http3:stream_id(), unidi | bidi} + | {stream_closed, cow_http3:stream_id(), quicer_app_errno()} + | closed + | {peer_send_shutdown, cow_http3:stream_id()} + | ok + | unknown + | {socket_error, any()}. + +handle({quic, Data, StreamRef, #{flags := Flags}}) when is_binary(Data) -> + {ok, StreamID} = quicer:get_stream_id(StreamRef), + IsFin = case Flags band ?QUIC_RECEIVE_FLAG_FIN of + ?QUIC_RECEIVE_FLAG_FIN -> fin; + _ -> nofin + end, + {data, StreamID, IsFin, Data}; +%% @todo Match on Conn. +handle({quic, Data, _Conn, Flags}) when is_binary(Data), is_integer(Flags) -> + {datagram, Data}; +%% QUIC_CONNECTION_EVENT_PEER_STREAM_STARTED. +handle({quic, new_stream, StreamRef, #{flags := Flags}}) -> + case quicer:setopt(StreamRef, active, true) of + ok -> + {ok, StreamID} = quicer:get_stream_id(StreamRef), + put({quicer_stream, StreamID}, StreamRef), + StreamType = case quicer:is_unidirectional(Flags) of + true -> unidi; + false -> bidi + end, + {stream_started, StreamID, StreamType}; + {error, Reason} -> + {socket_error, Reason} + end; +%% QUIC_STREAM_EVENT_SHUTDOWN_COMPLETE. +handle({quic, stream_closed, StreamRef, #{error := ErrorCode}}) -> + {ok, StreamID} = quicer:get_stream_id(StreamRef), + {stream_closed, StreamID, ErrorCode}; +%% QUIC_CONNECTION_EVENT_SHUTDOWN_COMPLETE. +handle({quic, closed, Conn, _Flags}) -> + _ = quicer:close_connection(Conn), + closed; +%% The following events are currently ignored either because +%% I do not know what they do or because we do not need to +%% take action. +handle({quic, streams_available, _Conn, _Props}) -> + ok; +handle({quic, dgram_state_changed, _Conn, _Props}) -> + ok; +%% QUIC_CONNECTION_EVENT_SHUTDOWN_INITIATED_BY_TRANSPORT +handle({quic, transport_shutdown, _Conn, _Flags}) -> + ok; +handle({quic, peer_send_shutdown, StreamRef, undefined}) -> + {ok, StreamID} = quicer:get_stream_id(StreamRef), + {peer_send_shutdown, StreamID}; +handle({quic, send_shutdown_complete, _StreamRef, _IsGraceful}) -> + ok; +handle({quic, shutdown, _Conn, success}) -> + ok; +handle(_Msg) -> + unknown. + +-endif. 
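The cowboy_quicer module added above is only compiled when Cowboy is built with COWBOY_QUICER=1 (for example by running "make COWBOY_QUICER=1" when using erlang.mk); otherwise the stub clauses raise the no_quicer error shown earlier. Its handle/1 normalizes raw quicer messages into the small set of events listed in its spec. A rough, illustrative sketch of consuming those events follows; the loop and the io:format calls are placeholders and are not how cowboy_http3 itself is written:

%% Illustrative only: translate raw quicer messages into normalized events.
quic_debug_loop() ->
    receive
        Msg = {quic, _, _, _} ->
            case cowboy_quicer:handle(Msg) of
                closed ->
                    io:format("connection closed~n");
                {socket_error, Reason} ->
                    io:format("socket error: ~p~n", [Reason]);
                Event ->
                    %% {data, StreamID, IsFin, Data}, {datagram, Data},
                    %% {stream_started, ...}, {stream_closed, ...}, etc.
                    io:format("event: ~p~n", [Event]),
                    quic_debug_loop()
            end
    end.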
diff --git a/src/cowboy_req.erl b/src/cowboy_req.erl index 90c5a3a..550054e 100644 --- a/src/cowboy_req.erl +++ b/src/cowboy_req.erl @@ -1,5 +1,5 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> -%% Copyright (c) 2011, Anthony Ramine <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> +%% Copyright (c) Anthony Ramine <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -445,6 +445,7 @@ parse_header_fun(<<"sec-websocket-protocol">>) -> fun cow_http_hd:parse_sec_webs parse_header_fun(<<"sec-websocket-version">>) -> fun cow_http_hd:parse_sec_websocket_version_req/1; parse_header_fun(<<"trailer">>) -> fun cow_http_hd:parse_trailer/1; parse_header_fun(<<"upgrade">>) -> fun cow_http_hd:parse_upgrade/1; +parse_header_fun(<<"wt-available-protocols">>) -> fun cow_http_hd:parse_wt_available_protocols/1; parse_header_fun(<<"x-forwarded-for">>) -> fun cow_http_hd:parse_x_forwarded_for/1. parse_header(Name, Req, Default, ParseFun) -> @@ -462,7 +463,7 @@ filter_cookies(Names0, Req=#{headers := Headers}) -> case header(<<"cookie">>, Req) of undefined -> Req; Value0 -> - Cookies0 = binary:split(Value0, <<$;>>), + Cookies0 = binary:split(Value0, <<$;>>, [global]), Cookies = lists:filter(fun(Cookie) -> lists:member(cookie_name(Cookie), Names) end, Cookies0), @@ -521,7 +522,11 @@ read_body(Req=#{has_read_body := true}, _) -> read_body(Req, Opts) -> Length = maps:get(length, Opts, 8000000), Period = maps:get(period, Opts, 15000), - Timeout = maps:get(timeout, Opts, Period + 1000), + DefaultTimeout = case Period of + infinity -> infinity; %% infinity + 1000 = infinity. + _ -> Period + 1000 + end, + Timeout = maps:get(timeout, Opts, DefaultTimeout), Ref = make_ref(), cast({read_body, self(), Ref, Length, Period}, Req), receive @@ -710,22 +715,43 @@ set_resp_cookie(Name, Value, Req, Opts) -> RespCookies = maps:get(resp_cookies, Req, #{}), Req#{resp_cookies => RespCookies#{Name => Cookie}}. -%% @todo We could add has_resp_cookie and delete_resp_cookie now. +%% @todo We could add has_resp_cookie and unset_resp_cookie now. -spec set_resp_header(binary(), iodata(), Req) -> Req when Req::req(). +set_resp_header(<<"set-cookie">>, _, _) -> + exit({response_error, invalid_header, + 'Response cookies must be set using cowboy_req:set_resp_cookie/3,4.'}); set_resp_header(Name, Value, Req=#{resp_headers := RespHeaders}) -> Req#{resp_headers => RespHeaders#{Name => Value}}; set_resp_header(Name,Value, Req) -> Req#{resp_headers => #{Name => Value}}. --spec set_resp_headers(cowboy:http_headers(), Req) +-spec set_resp_headers(cowboy:http_headers() | [{binary(), iodata()}], Req) -> Req when Req::req(). +set_resp_headers(Headers, Req) when is_list(Headers) -> + set_resp_headers_list(Headers, Req, #{}); +set_resp_headers(#{<<"set-cookie">> := _}, _) -> + exit({response_error, invalid_header, + 'Response cookies must be set using cowboy_req:set_resp_cookie/3,4.'}); set_resp_headers(Headers, Req=#{resp_headers := RespHeaders}) -> Req#{resp_headers => maps:merge(RespHeaders, Headers)}; set_resp_headers(Headers, Req) -> Req#{resp_headers => Headers}. 
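With the change above, cowboy_req:set_resp_headers/2 also accepts a list of name/value pairs; duplicate names are combined into a single comma-separated value (see the set_resp_headers_list/3 helper just below), and any attempt to set set-cookie through set_resp_header/3 or set_resp_headers/2 now exits with a response_error. A short sketch; the header and cookie values are arbitrary examples:

%% Hypothetical handler code using the new list form.
set_headers_example(Req) ->
    Req2 = cowboy_req:set_resp_headers([
        {<<"cache-control">>, <<"no-cache">>},
        %% Duplicate names are merged: "no-cache, no-store".
        {<<"cache-control">>, <<"no-store">>}
    ], Req),
    %% set-cookie is rejected here; cookies go through set_resp_cookie/3,4.
    cowboy_req:set_resp_cookie(<<"sessionid">>, <<"abc">>, Req2, #{http_only => true}).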
+set_resp_headers_list([], Req, Acc) -> + set_resp_headers(Acc, Req); +set_resp_headers_list([{<<"set-cookie">>, _}|_], _, _) -> + exit({response_error, invalid_header, + 'Response cookies must be set using cowboy_req:set_resp_cookie/3,4.'}); +set_resp_headers_list([{Name, Value}|Tail], Req, Acc) -> + case Acc of + #{Name := ValueAcc} -> + set_resp_headers_list(Tail, Req, Acc#{Name => [ValueAcc, <<", ">>, Value]}); + _ -> + set_resp_headers_list(Tail, Req, Acc#{Name => Value}) + end. + -spec resp_header(binary(), req()) -> binary() | undefined. resp_header(Name, Req) -> resp_header(Name, Req, undefined). @@ -775,7 +801,11 @@ inform(Status, Req) -> -spec inform(cowboy:http_status(), cowboy:http_headers(), req()) -> ok. inform(_, _, #{has_sent_resp := _}) -> - error(function_clause); %% @todo Better error message. + exit({response_error, response_already_sent, + 'The final response has already been sent.'}); +inform(_, #{<<"set-cookie">> := _}, _) -> + exit({response_error, invalid_header, + 'Response cookies must be set using cowboy_req:set_resp_cookie/3,4.'}); inform(Status, Headers, Req) when is_integer(Status); is_binary(Status) -> cast({inform, Status, Headers}, Req). @@ -793,7 +823,11 @@ reply(Status, Headers, Req) -> -spec reply(cowboy:http_status(), cowboy:http_headers(), resp_body(), Req) -> Req when Req::req(). reply(_, _, _, #{has_sent_resp := _}) -> - error(function_clause); %% @todo Better error message. + exit({response_error, response_already_sent, + 'The final response has already been sent.'}); +reply(_, #{<<"set-cookie">> := _}, _, _) -> + exit({response_error, invalid_header, + 'Response cookies must be set using cowboy_req:set_resp_cookie/3,4.'}); reply(Status, Headers, {sendfile, _, 0, _}, Req) when is_integer(Status); is_binary(Status) -> do_reply(Status, Headers#{ @@ -809,20 +843,26 @@ reply(Status, Headers, SendFile = {sendfile, _, Len, _}, Req) %% Neither status code must include a response body. (RFC7230 3.3) reply(Status, Headers, Body, Req) when Status =:= 204; Status =:= 304 -> - 0 = iolist_size(Body), - do_reply(Status, Headers, Body, Req); + do_reply_ensure_no_body(Status, Headers, Body, Req); reply(Status = <<"204",_/bits>>, Headers, Body, Req) -> - 0 = iolist_size(Body), - do_reply(Status, Headers, Body, Req); + do_reply_ensure_no_body(Status, Headers, Body, Req); reply(Status = <<"304",_/bits>>, Headers, Body, Req) -> - 0 = iolist_size(Body), - do_reply(Status, Headers, Body, Req); + do_reply_ensure_no_body(Status, Headers, Body, Req); reply(Status, Headers, Body, Req) when is_integer(Status); is_binary(Status) -> do_reply(Status, Headers#{ <<"content-length">> => integer_to_binary(iolist_size(Body)) }, Body, Req). +do_reply_ensure_no_body(Status, Headers, Body, Req) -> + case iolist_size(Body) of + 0 -> + do_reply(Status, Headers, Body, Req); + _ -> + exit({response_error, payload_too_large, + '204 and 304 responses must not include a body. (RFC7230 3.3)'}) + end. + %% Don't send any body for HEAD responses. While the protocol code is %% supposed to enforce this rule, we prefer to avoid copying too much %% data around if we can avoid it. @@ -843,16 +883,19 @@ stream_reply(Status, Req) -> -spec stream_reply(cowboy:http_status(), cowboy:http_headers(), Req) -> Req when Req::req(). 
stream_reply(_, _, #{has_sent_resp := _}) -> - error(function_clause); + exit({response_error, response_already_sent, + 'The final response has already been sent.'}); +stream_reply(_, #{<<"set-cookie">> := _}, _) -> + exit({response_error, invalid_header, + 'Response cookies must be set using cowboy_req:set_resp_cookie/3,4.'}); %% 204 and 304 responses must NOT send a body. We therefore %% transform the call to a full response and expect the user %% to NOT call stream_body/3 afterwards. (RFC7230 3.3) -stream_reply(Status = 204, Headers=#{}, Req) -> +stream_reply(Status, Headers=#{}, Req) + when Status =:= 204; Status =:= 304 -> reply(Status, Headers, <<>>, Req); stream_reply(Status = <<"204",_/bits>>, Headers=#{}, Req) -> reply(Status, Headers, <<>>, Req); -stream_reply(Status = 304, Headers=#{}, Req) -> - reply(Status, Headers, <<>>, Req); stream_reply(Status = <<"304",_/bits>>, Headers=#{}, Req) -> reply(Status, Headers, <<>>, Req); stream_reply(Status, Headers=#{}, Req) when is_integer(Status); is_binary(Status) -> @@ -896,6 +939,9 @@ stream_events(Events, IsFin, Req=#{has_sent_resp := headers}) -> stream_body({data, self(), IsFin, cow_sse:events(Events)}, Req). -spec stream_trailers(cowboy:http_headers(), req()) -> ok. +stream_trailers(#{<<"set-cookie">> := _}, _) -> + exit({response_error, invalid_header, + 'Response cookies must be set using cowboy_req:set_resp_cookie/3,4.'}); stream_trailers(Trailers, Req=#{has_sent_resp := headers}) -> cast({trailers, Trailers}, Req). @@ -907,6 +953,9 @@ push(Path, Headers, Req) -> %% @todo Path, Headers, Opts, everything should be in proper binary, %% or normalized when creating the Req object. -spec push(iodata(), cowboy:http_headers(), req(), push_opts()) -> ok. +push(_, _, #{has_sent_resp := _}, _) -> + exit({response_error, response_already_sent, + 'The final response has already been sent.'}); push(Path, Headers, Req=#{scheme := Scheme0, host := Host0, port := Port0}, Opts) -> Method = maps:get(method, Opts, <<"GET">>), Scheme = maps:get(scheme, Opts, Scheme0), @@ -991,7 +1040,12 @@ filter([], Map, Errors) -> _ -> {error, Errors} end; filter([{Key, Constraints}|Tail], Map, Errors) -> - filter_constraints(Tail, Map, Errors, Key, maps:get(Key, Map), Constraints); + case maps:find(Key, Map) of + {ok, Value} -> + filter_constraints(Tail, Map, Errors, Key, Value, Constraints); + error -> + filter(Tail, Map, Errors#{Key => required}) + end; filter([{Key, Constraints, Default}|Tail], Map, Errors) -> case maps:find(Key, Map) of {ok, Value} -> diff --git a/src/cowboy_rest.erl b/src/cowboy_rest.erl index 468f9ab..9f30fcf 100644 --- a/src/cowboy_rest.erl +++ b/src/cowboy_rest.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -97,7 +97,7 @@ -optional_callbacks([forbidden/2]). -callback generate_etag(Req, State) - -> {binary() | {weak | strong, binary()}, Req, State} + -> {binary() | {weak | strong, binary()} | undefined, Req, State} when Req::cowboy_req:req(), State::any(). -optional_callbacks([generate_etag/2]). @@ -246,9 +246,6 @@ handler :: atom(), handler_state :: any(), - %% Allowed methods. Only used for OPTIONS requests. - allowed_methods :: [binary()] | undefined, - %% Media type. 
content_types_p = [] :: [{binary() | {binary(), binary(), [{binary(), binary()}] | '*'}, @@ -307,17 +304,17 @@ known_methods(Req, State=#state{method=Method}) -> Method =:= <<"POST">>; Method =:= <<"PUT">>; Method =:= <<"PATCH">>; Method =:= <<"DELETE">>; Method =:= <<"OPTIONS">> -> - next(Req, State, fun uri_too_long/2); + uri_too_long(Req, State); no_call -> - next(Req, State, 501); + respond(Req, State, 501); {stop, Req2, State2} -> terminate(Req2, State2); {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> switch_handler(Switch, Req2, State2); {List, Req2, State2} -> case lists:member(Method, List) of - true -> next(Req2, State2, fun uri_too_long/2); - false -> next(Req2, State2, 501) + true -> uri_too_long(Req2, State2); + false -> respond(Req2, State2, 501) end end. @@ -327,39 +324,26 @@ uri_too_long(Req, State) -> %% allowed_methods/2 should return a list of binary methods. allowed_methods(Req, State=#state{method=Method}) -> case call(Req, State, allowed_methods) of - no_call when Method =:= <<"HEAD">>; Method =:= <<"GET">> -> - next(Req, State, fun malformed_request/2); - no_call when Method =:= <<"OPTIONS">> -> - next(Req, State#state{allowed_methods= - [<<"HEAD">>, <<"GET">>, <<"OPTIONS">>]}, - fun malformed_request/2); + no_call when Method =:= <<"HEAD">>; Method =:= <<"GET">>; Method =:= <<"OPTIONS">> -> + Req2 = cowboy_req:set_resp_header(<<"allow">>, <<"HEAD, GET, OPTIONS">>, Req), + malformed_request(Req2, State); no_call -> - method_not_allowed(Req, State, - [<<"HEAD">>, <<"GET">>, <<"OPTIONS">>]); + Req2 = cowboy_req:set_resp_header(<<"allow">>, <<"HEAD, GET, OPTIONS">>, Req), + respond(Req2, State, 405); {stop, Req2, State2} -> terminate(Req2, State2); {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> switch_handler(Switch, Req2, State2); {List, Req2, State2} -> + Req3 = cowboy_req:set_resp_header(<<"allow">>, cow_http_hd:allow(List), Req2), case lists:member(Method, List) of - true when Method =:= <<"OPTIONS">> -> - next(Req2, State2#state{allowed_methods=List}, - fun malformed_request/2); true -> - next(Req2, State2, fun malformed_request/2); + malformed_request(Req3, State2); false -> - method_not_allowed(Req2, State2, List) + respond(Req3, State2, 405) end end. -method_not_allowed(Req, State, []) -> - Req2 = cowboy_req:set_resp_header(<<"allow">>, <<>>, Req), - respond(Req2, State, 405); -method_not_allowed(Req, State, Methods) -> - << ", ", Allow/binary >> = << << ", ", M/binary >> || M <- Methods >>, - Req2 = cowboy_req:set_resp_header(<<"allow">>, Allow, Req), - respond(Req2, State, 405). - malformed_request(Req, State) -> expect(Req, State, malformed_request, false, fun is_authorized/2, 400). @@ -413,16 +397,10 @@ valid_entity_length(Req, State) -> %% If you need to add additional headers to the response at this point, %% you should do it directly in the options/2 call using set_resp_headers. 
-options(Req, State=#state{allowed_methods=Methods, method= <<"OPTIONS">>}) -> +options(Req, State=#state{method= <<"OPTIONS">>}) -> case call(Req, State, options) of - no_call when Methods =:= [] -> - Req2 = cowboy_req:set_resp_header(<<"allow">>, <<>>, Req), - respond(Req2, State, 200); no_call -> - << ", ", Allow/binary >> - = << << ", ", M/binary >> || M <- Methods >>, - Req2 = cowboy_req:set_resp_header(<<"allow">>, Allow, Req), - respond(Req2, State, 200); + respond(Req, State, 200); {stop, Req2, State2} -> terminate(Req2, State2); {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> @@ -471,7 +449,7 @@ content_types_provided(Req, State) -> {[], Req2, State2} -> not_acceptable(Req2, State2); {CTP, Req2, State2} -> - CTP2 = [normalize_content_types(P) || P <- CTP], + CTP2 = [normalize_content_types(P, provide) || P <- CTP], State3 = State2#state{content_types_p=CTP2}, try cowboy_req:parse_header(<<"accept">>, Req2) of undefined -> @@ -491,10 +469,14 @@ content_types_provided(Req, State) -> end end. -normalize_content_types({ContentType, Callback}) +normalize_content_types({ContentType, Callback}, _) when is_binary(ContentType) -> {cow_http_hd:parse_content_type(ContentType), Callback}; -normalize_content_types(Normalized) -> +normalize_content_types(Normalized = {{Type, SubType, _}, _}, _) + when is_binary(Type), is_binary(SubType) -> + Normalized; +%% Wildcard for content_types_accepted. +normalize_content_types(Normalized = {'*', _}, accept) -> Normalized. prioritize_accept(Accept) -> @@ -1059,7 +1041,7 @@ accept_resource(Req, State) -> {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> switch_handler(Switch, Req2, State2); {CTA, Req2, State2} -> - CTA2 = [normalize_content_types(P) || P <- CTA], + CTA2 = [normalize_content_types(P, accept) || P <- CTA], try cowboy_req:parse_header(<<"content-type">>, Req2) of %% We do not match against the boundary parameter for multipart. {Type = <<"multipart">>, SubType, Params} -> @@ -1099,11 +1081,19 @@ process_content_type(Req, State=#state{method=Method, exists=Exists}, Fun) -> {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> switch_handler(Switch, Req2, State2); {true, Req2, State2} when Exists -> - next(Req2, State2, fun has_resp_body/2); + has_resp_body(Req2, State2); {true, Req2, State2} -> - next(Req2, State2, fun maybe_created/2); + maybe_created(Req2, State2); {false, Req2, State2} -> respond(Req2, State2, 400); + {{created, ResURL}, Req2, State2} when Method =:= <<"POST">> -> + Req3 = cowboy_req:set_resp_header( + <<"location">>, ResURL, Req2), + respond(Req3, State2, 201); + {{see_other, ResURL}, Req2, State2} when Method =:= <<"POST">> -> + Req3 = cowboy_req:set_resp_header( + <<"location">>, ResURL, Req2), + respond(Req3, State2, 303); {{true, ResURL}, Req2, State2} when Method =:= <<"POST">> -> Req3 = cowboy_req:set_resp_header( <<"location">>, ResURL, Req2), @@ -1188,6 +1178,7 @@ if_range(Req=#{headers := #{<<"if-range">> := _, <<"range">> := _}}, if_range(Req, State) -> range(Req, State). +%% @todo This can probably be moved to if_range directly. range(Req, State=#state{ranges_a=[]}) -> set_resp_body(Req, State); range(Req, State) -> @@ -1519,6 +1510,12 @@ generate_etag(Req, State=#state{etag=undefined}) -> case unsafe_call(Req, State, generate_etag) of no_call -> {undefined, Req, State#state{etag=no_call}}; + %% We allow the callback to return 'undefined' + %% to allow conditionally generating etags. 
We + %% handle 'undefined' the same as if the function + %% was not exported. + {undefined, Req2, State2} -> + {undefined, Req2, State2#state{etag=no_call}}; {Etag, Req2, State2} when is_binary(Etag) -> Etag2 = cow_http_hd:parse_etag(Etag), {Etag2, Req2, State2#state{etag=Etag2}}; @@ -1625,5 +1622,6 @@ error_terminate(Req, #state{handler=Handler, handler_state=HandlerState}, Class, erlang:raise(Class, Reason, Stacktrace). terminate(Req, #state{handler=Handler, handler_state=HandlerState}) -> + %% @todo I don't think the result is used anywhere? Result = cowboy_handler:terminate(normal, Req, HandlerState, Handler), {ok, Req, Result}. diff --git a/src/cowboy_router.erl b/src/cowboy_router.erl index 0b7fe41..393d82d 100644 --- a/src/cowboy_router.erl +++ b/src/cowboy_router.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_static.erl b/src/cowboy_static.erl index b0cf146..ce34b01 100644 --- a/src/cowboy_static.erl +++ b/src/cowboy_static.erl @@ -1,5 +1,5 @@ -%% Copyright (c) 2013-2017, Loïc Hoguin <[email protected]> -%% Copyright (c) 2011, Magnus Klaar <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> +%% Copyright (c) Magnus Klaar <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -29,7 +29,7 @@ -type extra_charset() :: {charset, module(), function()} | {charset, binary()}. -type extra_etag() :: {etag, module(), function()} | {etag, false}. -type extra_mimetypes() :: {mimetypes, module(), function()} - | {mimetypes, binary() | {binary(), binary(), [{binary(), binary()}]}}. + | {mimetypes, binary() | {binary(), binary(), '*' | [{binary(), binary()}]}}. -type extra() :: [extra_charset() | extra_etag() | extra_mimetypes()]. -type opts() :: {file | dir, string() | binary()} | {file | dir, string() | binary(), extra()} @@ -332,7 +332,7 @@ forbidden(Req, State) -> %% Detect the mimetype of the file. -spec content_types_provided(Req, State) - -> {[{binary(), get_file}], Req, State} + -> {[{binary() | {binary(), binary(), '*' | [{binary(), binary()}]}, get_file}], Req, State} when State::state(). content_types_provided(Req, State={Path, _, Extra}) when is_list(Extra) -> case lists:keyfind(mimetypes, 1, Extra) of @@ -347,7 +347,7 @@ content_types_provided(Req, State={Path, _, Extra}) when is_list(Extra) -> %% Detect the charset of the file. -spec charsets_provided(Req, State) - -> {[binary()], Req, State} + -> {[binary()], Req, State} | no_call when State::state(). charsets_provided(Req, State={Path, _, Extra}) -> case lists:keyfind(charset, 1, Extra) of @@ -381,7 +381,7 @@ resource_exists(Req, State) -> %% Generate an etag for the file. -spec generate_etag(Req, State) - -> {{strong | weak, binary()}, Req, State} + -> {{strong | weak, binary() | undefined}, Req, State} when State::state(). generate_etag(Req, State={Path, {_, #file_info{size=Size, mtime=Mtime}}, Extra}) -> @@ -408,7 +408,7 @@ last_modified(Req, State={_, {_, #file_info{mtime=Modified}}, _}) -> %% Stream the file. -spec get_file(Req, State) - -> {{sendfile, 0, non_neg_integer(), binary()}, Req, State} + -> {{sendfile, 0, non_neg_integer(), binary()} | binary(), Req, State} when State::state(). 
get_file(Req, State={Path, {direct, #file_info{size=Size}}, _}) -> {{sendfile, 0, Size, Path}, Req, State}; diff --git a/src/cowboy_stream.erl b/src/cowboy_stream.erl index 2dad6d0..6680bdc 100644 --- a/src/cowboy_stream.erl +++ b/src/cowboy_stream.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2015-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -49,6 +49,7 @@ -type reason() :: normal | switch_protocol | {internal_error, timeout | {error | exit | throw, any()}, human_reason()} | {socket_error, closed | atom(), human_reason()} + %% @todo Or cow_http3:error(). | {stream_error, cow_http2:error(), human_reason()} | {connection_error, cow_http2:error(), human_reason()} | {stop, cow_http2:frame() | {exit, any()}, human_reason()}. diff --git a/src/cowboy_stream_h.erl b/src/cowboy_stream_h.erl index f516f3d..3c3c084 100644 --- a/src/cowboy_stream_h.erl +++ b/src/cowboy_stream_h.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2016-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -138,7 +138,7 @@ info(StreamID, Info={'EXIT', Pid, {{request_error, Reason, _HumanReadable}, _}}, {error_response, Status, #{<<"content-length">> => <<"0">>}, <<>>}, stop ], State); -info(StreamID, Exit={'EXIT', Pid, {Reason, Stacktrace}}, State=#state{ref=Ref, pid=Pid}) -> +info(StreamID, Exit={'EXIT', Pid, Reason}, State=#state{ref=Ref, pid=Pid}) -> Commands0 = [{internal_error, Exit, 'Stream process crashed.'}], Commands = case Reason of normal -> Commands0; @@ -146,11 +146,15 @@ info(StreamID, Exit={'EXIT', Pid, {Reason, Stacktrace}}, State=#state{ref=Ref, p {shutdown, _} -> Commands0; _ -> [{log, error, "Ranch listener ~p, connection process ~p, stream ~p " - "had its request process ~p exit with reason " - "~999999p and stacktrace ~999999p~n", - [Ref, self(), StreamID, Pid, Reason, Stacktrace]} + "had its request process ~p exit with reason ~0p~n", + [Ref, self(), StreamID, Pid, Reason]} |Commands0] end, + %% @todo We are trying to send a 500 response before resetting + %% the stream. But due to the way the RESET_STREAM frame + %% works in QUIC the data may be lost. 
The problem is + %% known and a draft RFC exists at + %% https://www.ietf.org/id/draft-ietf-quic-reliable-stream-reset-03.html do_info(StreamID, Exit, [ {error_response, 500, #{<<"content-length">> => <<"0">>}, <<>>} |Commands], State); diff --git a/src/cowboy_sub_protocol.erl b/src/cowboy_sub_protocol.erl index 6714289..1f24d00 100644 --- a/src/cowboy_sub_protocol.erl +++ b/src/cowboy_sub_protocol.erl @@ -1,5 +1,5 @@ -%% Copyright (c) 2013-2017, Loïc Hoguin <[email protected]> -%% Copyright (c) 2013, James Fish <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> +%% Copyright (c) James Fish <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_sup.erl b/src/cowboy_sup.erl index d3ac3b0..224ef7d 100644 --- a/src/cowboy_sup.erl +++ b/src/cowboy_sup.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_tls.erl b/src/cowboy_tls.erl index c049ecb..6d0dcd3 100644 --- a/src/cowboy_tls.erl +++ b/src/cowboy_tls.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2015-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -33,19 +33,17 @@ start_link(Ref, Transport, Opts) -> -spec connection_process(pid(), ranch:ref(), module(), cowboy:opts()) -> ok. connection_process(Parent, Ref, Transport, Opts) -> - ProxyInfo = case maps:get(proxy_header, Opts, false) of - true -> - {ok, ProxyInfo0} = ranch:recv_proxy_header(Ref, 1000), - ProxyInfo0; - false -> - undefined - end, + ProxyInfo = get_proxy_info(Ref, Opts), {ok, Socket} = ranch:handshake(Ref), case ssl:negotiated_protocol(Socket) of {ok, <<"h2">>} -> init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, cowboy_http2); _ -> %% http/1.1 or no protocol negotiated. - init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, cowboy_http) + Protocol = case maps:get(alpn_default_protocol, Opts, http) of + http -> cowboy_http; + http2 -> cowboy_http2 + end, + init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, Protocol) end. init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, Protocol) -> @@ -54,3 +52,11 @@ init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, Protocol) -> supervisor -> process_flag(trap_exit, true) end, Protocol:init(Parent, Ref, Socket, Transport, ProxyInfo, Opts). + +get_proxy_info(Ref, #{proxy_header := true}) -> + case ranch:recv_proxy_header(Ref, 1000) of + {ok, ProxyInfo} -> ProxyInfo; + {error, closed} -> exit({shutdown, closed}) + end; +get_proxy_info(_, _) -> + undefined. 
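Two behavioural changes in cowboy_tls above are worth illustrating: when ALPN does not select "h2", the connection now falls back to the protocol named by the new alpn_default_protocol option (http or http2) instead of always HTTP/1.1, and a socket closed while reading the PROXY header now exits with {shutdown, closed} instead of crashing on a match failure. A hedged sketch of a listener opting into the HTTP/2 fallback; the listener name, route, handler module and certificate paths are placeholders:

%% Illustrative listener setup; adjust paths and routes to your application.
start_tls_example() ->
    Dispatch = cowboy_router:compile([{'_', [{"/", my_handler, []}]}]),
    {ok, _} = cowboy:start_tls(example_tls,
        [{port, 8443},
         {certfile, "/path/to/cert.pem"},
         {keyfile, "/path/to/key.pem"}],
        #{env => #{dispatch => Dispatch},
          %% Speak HTTP/2 when ALPN did not select h2.
          alpn_default_protocol => http2,
          %% Expect a PROXY protocol header before the TLS data.
          proxy_header => true}).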
diff --git a/src/cowboy_tracer_h.erl b/src/cowboy_tracer_h.erl index 9a19ae1..91a431b 100644 --- a/src/cowboy_tracer_h.erl +++ b/src/cowboy_tracer_h.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/src/cowboy_websocket.erl b/src/cowboy_websocket.erl index e7d8f31..cb30c3f 100644 --- a/src/cowboy_websocket.erl +++ b/src/cowboy_websocket.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -69,6 +69,9 @@ active_n => pos_integer(), compress => boolean(), deflate_opts => cow_ws:deflate_opts(), + dynamic_buffer => false | {pos_integer(), pos_integer()}, + dynamic_buffer_initial_average => non_neg_integer(), + dynamic_buffer_initial_size => pos_integer(), idle_timeout => timeout(), max_frame_size => non_neg_integer() | infinity, req_filter => fun((cowboy_req:req()) -> map()), @@ -76,6 +79,14 @@ }. -export_type([opts/0]). +%% We don't want to reset the idle timeout too often, +%% so we don't reset it on data. Instead we reset the +%% number of ticks we have observed. We divide the +%% timeout value by a value and that value becomes +%% the number of ticks at which point we can drop +%% the connection. This value is the number of ticks. +-define(IDLE_TIMEOUT_TICKS, 10). + -record(state, { parent :: undefined | pid(), ref :: ranch:ref(), @@ -86,8 +97,14 @@ handler :: module(), key = undefined :: undefined | binary(), timeout_ref = undefined :: undefined | reference(), + timeout_num = 0 :: 0..?IDLE_TIMEOUT_TICKS, messages = undefined :: undefined | {atom(), atom(), atom()} | {atom(), atom(), atom(), atom()}, + + %% Dynamic buffer moving average and current buffer size. + dynamic_buffer_size = false :: pos_integer() | false, + dynamic_buffer_moving_average = 0 :: non_neg_integer(), + hibernate = false :: boolean(), frag_state = undefined :: cow_ws:frag_state(), frag_buffer = <<>> :: binary(), @@ -103,7 +120,8 @@ %% is trying to upgrade to the Websocket protocol. -spec is_upgrade_request(cowboy_req:req()) -> boolean(). -is_upgrade_request(#{version := 'HTTP/2', method := <<"CONNECT">>, protocol := Protocol}) -> +is_upgrade_request(#{version := Version, method := <<"CONNECT">>, protocol := Protocol}) + when Version =:= 'HTTP/2'; Version =:= 'HTTP/3' -> <<"websocket">> =:= cowboy_bstr:to_lower(Protocol); is_upgrade_request(Req=#{version := 'HTTP/1.1', method := <<"GET">>}) -> ConnTokens = cowboy_req:parse_header(<<"connection">>, Req, []), @@ -148,13 +166,13 @@ upgrade(Req0=#{version := Version}, Env, Handler, HandlerState, Opts) -> <<"connection">> => <<"upgrade">>, <<"upgrade">> => <<"websocket">> }, Req0), Env}; - %% Use a generic 400 error for HTTP/2. + %% Use 501 Not Implemented for HTTP/2 and HTTP/3 as recommended + %% by RFC9220 3 (WebSockets Upgrade over HTTP/3). {error, upgrade_required} -> - {ok, cowboy_req:reply(400, Req0), Env} + {ok, cowboy_req:reply(501, Req0), Env} catch _:_ -> %% @todo Probably log something here? %% @todo Test that we can have 2 /ws 400 status code in a row on the same connection. - %% @todo Does this even work? {ok, cowboy_req:reply(400, Req0), Env} end. 
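The Websocket options map above gains dynamic buffer settings (dynamic_buffer and its initial size/average) next to the existing ones, and CONNECT-based upgrades are now accepted over HTTP/3 as well as HTTP/2, with a 501 rather than a 400 when the upgrade is not possible. For reference, options such as the idle timeout governed by the tick mechanism introduced above are passed from the handler's init/2, as in this hedged sketch (module name and values are arbitrary):

%% Hypothetical Websocket handler passing protocol options from init/2.
-module(my_ws_handler).
-behaviour(cowboy_websocket).
-export([init/2]).
-export([websocket_handle/2]).
-export([websocket_info/2]).

init(Req, State) ->
    {cowboy_websocket, Req, State, #{
        idle_timeout => 30000,
        max_frame_size => 1 bsl 20 %% 1 MiB
    }}.

websocket_handle({text, Msg}, State) ->
    {[{text, Msg}], State};
websocket_handle(_Frame, State) ->
    {[], State}.

websocket_info(_Info, State) ->
    {[], State}.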
@@ -260,7 +278,7 @@ websocket_handshake(State, Req=#{ref := Ref, pid := Pid, streamid := StreamID}, %% @todo We don't want date and server headers. Headers = cowboy_req:response_headers(#{}, Req), Pid ! {{Pid, StreamID}, {switch_protocol, Headers, ?MODULE, {State, HandlerState}}}, - takeover(Pid, Ref, {Pid, StreamID}, undefined, undefined, <<>>, + takeover(Pid, Ref, {Pid, StreamID}, undefined, #{}, <<>>, {State, HandlerState}). %% Connection process. @@ -285,17 +303,24 @@ websocket_handshake(State, Req=#{ref := Ref, pid := Pid, streamid := StreamID}, -spec takeover(pid(), ranch:ref(), inet:socket() | {pid(), cowboy_stream:streamid()}, module() | undefined, any(), binary(), {#state{}, any()}) -> no_return(). -takeover(Parent, Ref, Socket, Transport, _Opts, Buffer, - {State0=#state{handler=Handler}, HandlerState}) -> - %% @todo We should have an option to disable this behavior. - ranch:remove_connection(Ref), +takeover(Parent, Ref, Socket, Transport, Opts, Buffer, + {State0=#state{opts=WsOpts, handler=Handler, req=Req}, HandlerState}) -> + case Req of + #{version := 'HTTP/3'} -> ok; + %% @todo We should have an option to disable this behavior. + _ -> ranch:remove_connection(Ref) + end, Messages = case Transport of undefined -> undefined; _ -> Transport:messages() end, - State = loop_timeout(State0#state{parent=Parent, + State = set_idle_timeout(State0#state{parent=Parent, ref=Ref, socket=Socket, transport=Transport, - key=undefined, messages=Messages}), + opts=WsOpts#{dynamic_buffer => maps:get(dynamic_buffer, Opts, false)}, + key=undefined, messages=Messages, + %% Dynamic buffer only applies to HTTP/1.1 Websocket. + dynamic_buffer_size=init_dynamic_buffer_size(Opts), + dynamic_buffer_moving_average=maps:get(dynamic_buffer_initial_average, Opts, 0)}, 0), %% We call parse_header/3 immediately because there might be %% some data in the buffer that was sent along with the handshake. %% While it is not allowed by the protocol to send frames immediately, @@ -306,6 +331,12 @@ takeover(Parent, Ref, Socket, Transport, _Opts, Buffer, false -> after_init(State, HandlerState, #ps_header{buffer=Buffer}) end. +-include("cowboy_dynamic_buffer.hrl"). + +%% @todo Implement early socket error detection. +maybe_socket_error(_, _) -> + ok. + after_init(State=#state{active=true}, HandlerState, ParseState) -> %% Enable active,N for HTTP/1.1, and auto read_body for HTTP/2. %% We must do this only after calling websocket_init/1 (if any) @@ -327,7 +358,7 @@ after_init(State, HandlerState, ParseState) -> setopts_active(#state{transport=undefined}) -> ok; setopts_active(#state{socket=Socket, transport=Transport, opts=Opts}) -> - N = maps:get(active_n, Opts, 100), + N = maps:get(active_n, Opts, 1), Transport:setopts(Socket, [{active, N}]). maybe_read_body(#state{socket=Stream={Pid, _}, transport=undefined, active=true}) -> @@ -369,28 +400,41 @@ before_loop(State=#state{hibernate=true}, HandlerState, ParseState) -> before_loop(State, HandlerState, ParseState) -> loop(State, HandlerState, ParseState). --spec loop_timeout(#state{}) -> #state{}. -loop_timeout(State=#state{opts=Opts, timeout_ref=PrevRef}) -> +-spec set_idle_timeout(#state{}, 0..?IDLE_TIMEOUT_TICKS) -> #state{}. + +%% @todo Do we really need this for HTTP/2? +set_idle_timeout(State=#state{opts=Opts, timeout_ref=PrevRef}, TimeoutNum) -> + %% Most of the time we don't need to cancel the timer since it + %% will have triggered already. 
But this call is harmless so + %% it is kept to simplify the code as we do need to cancel when + %% options are changed dynamically. _ = case PrevRef of undefined -> ignore; - PrevRef -> erlang:cancel_timer(PrevRef) + PrevRef -> erlang:cancel_timer(PrevRef, [{async, true}, {info, false}]) end, case maps:get(idle_timeout, Opts, 60000) of infinity -> - State#state{timeout_ref=undefined}; + State#state{timeout_ref=undefined, timeout_num=TimeoutNum}; Timeout -> - TRef = erlang:start_timer(Timeout, self(), ?MODULE), - State#state{timeout_ref=TRef} + TRef = erlang:start_timer(Timeout div ?IDLE_TIMEOUT_TICKS, self(), ?MODULE), + State#state{timeout_ref=TRef, timeout_num=TimeoutNum} end. +-define(reset_idle_timeout(State), State#state{timeout_num=0}). + +tick_idle_timeout(State=#state{timeout_num=?IDLE_TIMEOUT_TICKS}, HandlerState, _) -> + websocket_close(State, HandlerState, timeout); +tick_idle_timeout(State=#state{timeout_num=TimeoutNum}, HandlerState, ParseState) -> + before_loop(set_idle_timeout(State, TimeoutNum + 1), HandlerState, ParseState). + -spec loop(#state{}, any(), parse_state()) -> no_return(). loop(State=#state{parent=Parent, socket=Socket, messages=Messages, timeout_ref=TRef}, HandlerState, ParseState) -> receive %% Socket messages. (HTTP/1.1) {OK, Socket, Data} when OK =:= element(1, Messages) -> - State2 = loop_timeout(State), - parse(State2, HandlerState, ParseState, Data); + State1 = maybe_resize_buffer(State, Data), + parse(?reset_idle_timeout(State1), HandlerState, ParseState, Data); {Closed, Socket} when Closed =:= element(2, Messages) -> terminate(State, HandlerState, {error, closed}); {Error, Socket, Reason} when Error =:= element(3, Messages) -> @@ -403,18 +447,16 @@ loop(State=#state{parent=Parent, socket=Socket, messages=Messages, %% Body reading messages. (HTTP/2) {request_body, _Ref, nofin, Data} -> maybe_read_body(State), - State2 = loop_timeout(State), - parse(State2, HandlerState, ParseState, Data); + parse(?reset_idle_timeout(State), HandlerState, ParseState, Data); %% @todo We need to handle this case as if it was an {error, closed} %% but not before we finish processing frames. We probably should have %% a check in before_loop to let us stop looping if a flag is set. {request_body, _Ref, fin, _, Data} -> maybe_read_body(State), - State2 = loop_timeout(State), - parse(State2, HandlerState, ParseState, Data); + parse(?reset_idle_timeout(State), HandlerState, ParseState, Data); %% Timeouts. {timeout, TRef, ?MODULE} -> - websocket_close(State, HandlerState, timeout); + tick_idle_timeout(State, HandlerState, ParseState); {timeout, OlderTRef, ?MODULE} when is_reference(OlderTRef) -> before_loop(State, HandlerState, ParseState); %% System messages. @@ -458,12 +500,16 @@ parse_header(State=#state{opts=Opts, frag_state=FragState, extensions=Extensions websocket_close(State, HandlerState, {error, badframe}) end. 
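A worked example of the tick mechanism above: with the default idle_timeout of 60000 ms, set_idle_timeout/2 arms a timer for 60000 div ?IDLE_TIMEOUT_TICKS = 6000 ms. Incoming data no longer restarts that timer; it only resets timeout_num to 0 via ?reset_idle_timeout, so the connection is closed after ?IDLE_TIMEOUT_TICKS consecutive timer firings with no data in between, that is after roughly the configured 60 seconds of idleness, while avoiding a cancel/start of the timer on every received frame.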
-parse_payload(State=#state{frag_state=FragState, utf8_state=Incomplete, extensions=Extensions}, +parse_payload(State=#state{opts=Opts, frag_state=FragState, utf8_state=Incomplete, extensions=Extensions}, HandlerState, ParseState=#ps_payload{ type=Type, len=Len, mask_key=MaskKey, rsv=Rsv, unmasked=Unmasked, unmasked_len=UnmaskedLen}, Data) -> + MaxFrameSize = case maps:get(max_frame_size, Opts, infinity) of + infinity -> infinity; + MaxFrameSize0 -> MaxFrameSize0 - UnmaskedLen + end, case cow_ws:parse_payload(Data, MaskKey, Incomplete, UnmaskedLen, - Type, Len, FragState, Extensions, Rsv) of + Type, Len, FragState, Extensions#{max_inflate_size => MaxFrameSize}, Rsv) of {ok, CloseCode, Payload, Utf8State, Rest} -> dispatch_frame(State#state{utf8_state=Utf8State}, HandlerState, ParseState#ps_payload{unmasked= <<Unmasked/binary, Payload/binary>>, @@ -593,13 +639,16 @@ commands([{active, Active}|Tail], State0=#state{active=Active0}, Data) when is_b commands(Tail, State#state{active=Active}, Data); commands([{deflate, Deflate}|Tail], State, Data) when is_boolean(Deflate) -> commands(Tail, State#state{deflate=Deflate}, Data); -commands([{set_options, SetOpts}|Tail], State0=#state{opts=Opts}, Data) -> - State = case SetOpts of - #{idle_timeout := IdleTimeout} -> - loop_timeout(State0#state{opts=Opts#{idle_timeout => IdleTimeout}}); - _ -> - State0 - end, +commands([{set_options, SetOpts}|Tail], State0, Data) -> + State = maps:fold(fun + (idle_timeout, IdleTimeout, StateF=#state{opts=Opts}) -> + %% We reset the number of ticks when changing the idle_timeout option. + set_idle_timeout(StateF#state{opts=Opts#{idle_timeout => IdleTimeout}}, 0); + (max_frame_size, MaxFrameSize, StateF=#state{opts=Opts}) -> + StateF#state{opts=Opts#{max_frame_size => MaxFrameSize}}; + (_, _, StateF) -> + StateF + end, State0, SetOpts), commands(Tail, State, Data); commands([{shutdown_reason, ShutdownReason}|Tail], State, Data) -> commands(Tail, State#state{shutdown_reason=ShutdownReason}, Data); diff --git a/src/cowboy_webtransport.erl b/src/cowboy_webtransport.erl new file mode 100644 index 0000000..8c8ca39 --- /dev/null +++ b/src/cowboy_webtransport.erl @@ -0,0 +1,292 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% @todo To enable WebTransport the following options need to be set: +%% +%% QUIC: +%% - max_datagram_frame_size > 0 +%% +%% HTTP/3: +%% - SETTINGS_H3_DATAGRAM = 1 +%% - SETTINGS_ENABLE_CONNECT_PROTOCOL = 1 +%% - SETTINGS_WT_MAX_SESSIONS >= 1 + +%% Cowboy supports versions 07 through 13 of the WebTransport drafts. +%% Cowboy also has some compatibility with version 02. +%% +%% WebTransport CONNECT requests go through cowboy_stream as normal +%% and then an upgrade/switch_protocol is issued (just like Websocket). 
+%% After that point none of the events go through cowboy_stream except +%% the final terminate event. The request process becomes the process +%% handling all events in the WebTransport session. +%% +%% WebTransport sessions can be ended via a command, via a crash or +%% exit, via the closing of the connection (client or server initiated), +%% via the client ending the session (mirroring the command) or via +%% the client terminating the CONNECT stream. +-module(cowboy_webtransport). + +-export([upgrade/4]). +-export([upgrade/5]). + +%% cowboy_stream. +-export([info/3]). +-export([terminate/3]). + +-type stream_type() :: unidi | bidi. +-type open_stream_ref() :: any(). + +-type event() :: + {stream_open, cow_http3:stream_id(), stream_type()} | + {opened_stream_id, open_stream_ref(), cow_http3:stream_id()} | + {stream_data, cow_http3:stream_id(), cow_http:fin(), binary()} | + {datagram, binary()} | + close_initiated. + +-type commands() :: [ + {open_stream, open_stream_ref(), stream_type(), iodata()} | + {close_stream, cow_http3:stream_id(), cow_http3:wt_app_error_code()} | + {send, cow_http3:stream_id() | datagram, iodata()} | + initiate_close | + close | + {close, cow_http3:wt_app_error_code()} | + {close, cow_http3:wt_app_error_code(), iodata()} +]. +-export_type([commands/0]). + +-type call_result(State) :: {commands(), State} | {commands(), State, hibernate}. + +-callback init(Req, any()) + -> {ok | module(), Req, any()} + | {module(), Req, any(), any()} + when Req::cowboy_req:req(). + +-callback webtransport_init(State) + -> call_result(State) when State::any(). +-optional_callbacks([webtransport_init/1]). + +-callback webtransport_handle(event(), State) + -> call_result(State) when State::any(). +-optional_callbacks([webtransport_handle/2]). + +-callback webtransport_info(any(), State) + -> call_result(State) when State::any(). +-optional_callbacks([webtransport_info/2]). + +-callback terminate(any(), cowboy_req:req(), any()) -> ok. +-optional_callbacks([terminate/3]). + +-type opts() :: #{ + req_filter => fun((cowboy_req:req()) -> map()) +}. +-export_type([opts/0]). + +-record(state, { + id :: cow_http3:stream_id(), + parent :: pid(), + opts = #{} :: opts(), + handler :: module(), + hibernate = false :: boolean(), + req = #{} :: map() +}). + +%% This function mirrors a similar function for Websocket. + +-spec is_upgrade_request(cowboy_req:req()) -> boolean(). + +is_upgrade_request(#{version := Version, method := <<"CONNECT">>, protocol := Protocol}) + when Version =:= 'HTTP/3' -> + %% @todo scheme MUST BE "https" + <<"webtransport">> =:= cowboy_bstr:to_lower(Protocol); + +is_upgrade_request(_) -> + false. + +%% Stream process. + +-spec upgrade(Req, Env, module(), any()) + -> {ok, Req, Env} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). + +upgrade(Req, Env, Handler, HandlerState) -> + upgrade(Req, Env, Handler, HandlerState, #{}). + +-spec upgrade(Req, Env, module(), any(), opts()) + -> {ok, Req, Env} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). + +%% @todo Immediately crash if a response has already been sent.
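Together the event(), commands() and callback definitions above form the WebTransport handler contract. A minimal, hedged sketch of an echo handler built on them; the module name is hypothetical and the init/2 switch mirrors how Websocket handlers select their sub-protocol:

%% Hypothetical WebTransport echo handler.
-module(my_wt_handler).
-behaviour(cowboy_webtransport).
-export([init/2]).
-export([webtransport_handle/2]).

init(Req, State) ->
    %% Switch the CONNECT request to the WebTransport sub-protocol.
    {cowboy_webtransport, Req, State}.

%% Echo data back on the (bidi) stream it came from.
webtransport_handle({stream_data, StreamID, _IsFin, Data}, State) ->
    {[{send, StreamID, Data}], State};
%% Echo datagrams.
webtransport_handle({datagram, Data}, State) ->
    {[{send, datagram, Data}], State};
webtransport_handle(_Event, State) ->
    {[], State}.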
+upgrade(Req=#{version := 'HTTP/3', pid := Pid, streamid := StreamID}, Env, Handler, HandlerState, Opts) -> + FilteredReq = case maps:get(req_filter, Opts, undefined) of + undefined -> maps:with([method, version, scheme, host, port, path, qs, peer], Req); + FilterFun -> FilterFun(Req) + end, + State = #state{id=StreamID, parent=Pid, opts=Opts, handler=Handler, req=FilteredReq}, + %% @todo Must ensure the relevant settings are enabled (QUIC and H3). + %% Either we check them BEFORE, or we check them when the handler + %% is OK to initiate a webtransport session. Probably need to + %% check them BEFORE as we need to become (takeover) the webtransport process + %% after we are done with the upgrade. Maybe in cow_http3_machine but + %% it doesn't have QUIC settings currently (max_datagram_size). + case is_upgrade_request(Req) of + true -> + Headers = cowboy_req:response_headers(#{}, Req), + Pid ! {{Pid, StreamID}, {switch_protocol, Headers, ?MODULE, + #{session_pid => self()}}}, + webtransport_init(State, HandlerState); + %% Use 501 Not Implemented to mirror the recommendation in + %% by RFC9220 3 (WebSockets Upgrade over HTTP/3). + false -> + %% @todo I don't think terminate will be called. + {ok, cowboy_req:reply(501, Req), Env} + end. + +webtransport_init(State=#state{handler=Handler}, HandlerState) -> + case erlang:function_exported(Handler, webtransport_init, 1) of + true -> handler_call(State, HandlerState, webtransport_init, undefined); + false -> before_loop(State, HandlerState) + end. + +before_loop(State=#state{hibernate=true}, HandlerState) -> + proc_lib:hibernate(?MODULE, loop, [State#state{hibernate=false}, HandlerState]); +before_loop(State, HandlerState) -> + loop(State, HandlerState). + +-spec loop(#state{}, any()) -> no_return(). + +loop(State=#state{id=SessionID, parent=Parent}, HandlerState) -> + receive + {'$webtransport_event', SessionID, Event={closed, _, _}} -> + terminate_proc(State, HandlerState, Event); + {'$webtransport_event', SessionID, Event=closed_abruptly} -> + terminate_proc(State, HandlerState, Event); + {'$webtransport_event', SessionID, Event} -> + handler_call(State, HandlerState, webtransport_handle, Event); + %% Timeouts. +%% @todo idle_timeout +% {timeout, TRef, ?MODULE} -> +% tick_idle_timeout(State, HandlerState, ParseState); +% {timeout, OlderTRef, ?MODULE} when is_reference(OlderTRef) -> +% before_loop(State, HandlerState, ParseState); + %% System messages. + {'EXIT', Parent, Reason} -> + %% @todo We should exit gracefully. + exit(Reason); + {system, From, Request} -> + sys:handle_system_msg(Request, From, Parent, ?MODULE, [], + {State, HandlerState}); + %% Calls from supervisor module. + {'$gen_call', From, Call} -> + cowboy_children:handle_supervisor_call(Call, From, [], ?MODULE), + before_loop(State, HandlerState); + Message -> + handler_call(State, HandlerState, webtransport_info, Message) + end. + +handler_call(State=#state{handler=Handler}, HandlerState, Callback, Message) -> + try case Callback of + webtransport_init -> Handler:webtransport_init(HandlerState); + _ -> Handler:Callback(Message, HandlerState) + end of + {Commands, HandlerState2} when is_list(Commands) -> + handler_call_result(State, HandlerState2, Commands); + {Commands, HandlerState2, hibernate} when is_list(Commands) -> + handler_call_result(State#state{hibernate=true}, HandlerState2, Commands) + catch Class:Reason:Stacktrace -> + %% @todo Do we need to send a close? Let cowboy_http3 detect and handle it? 
+ handler_terminate(State, HandlerState, {crash, Class, Reason}), + erlang:raise(Class, Reason, Stacktrace) + end. + +handler_call_result(State0, HandlerState, Commands) -> + case commands(Commands, State0, ok, []) of + {ok, State} -> + before_loop(State, HandlerState); + {stop, State} -> + terminate_proc(State, HandlerState, stop) + end. + +%% We accumulate the commands that must be sent to the connection process +%% because we want to send everything into one message. Other commands are +%% processed immediately. + +commands([], State, Res, []) -> + {Res, State}; +commands([], State=#state{id=SessionID, parent=Pid}, Res, Commands) -> + Pid ! {'$webtransport_commands', SessionID, lists:reverse(Commands)}, + {Res, State}; +%% {open_stream, OpenStreamRef, StreamType, InitialData}. +commands([Command={open_stream, _, _, _}|Tail], State, Res, Acc) -> + commands(Tail, State, Res, [Command|Acc]); +%% {close_stream, StreamID, Code}. +commands([Command={close_stream, _, _}|Tail], State, Res, Acc) -> + commands(Tail, State, Res, [Command|Acc]); +%% @todo We must reject send to a remote unidi stream. +%% {send, StreamID | datagram, Data}. +commands([Command={send, _, _}|Tail], State, Res, Acc) -> + commands(Tail, State, Res, [Command|Acc]); +%% {send, StreamID, IsFin, Data}. +commands([Command={send, _, _, _}|Tail], State, Res, Acc) -> + commands(Tail, State, Res, [Command|Acc]); +%% initiate_close - DRAIN_WT_SESSION +commands([Command=initiate_close|Tail], State, Res, Acc) -> + commands(Tail, State, Res, [Command|Acc]); +%% close | {close, Code} | {close, Code, Msg} - CLOSE_WT_SESSION +%% @todo At this point the handler must not issue stream or send commands. +commands([Command=close|Tail], State, _, Acc) -> + commands(Tail, State, stop, [Command|Acc]); +commands([Command={close, _}|Tail], State, _, Acc) -> + commands(Tail, State, stop, [Command|Acc]); +commands([Command={close, _, _}|Tail], State, _, Acc) -> + commands(Tail, State, stop, [Command|Acc]). +%% @todo A set_options command could be useful to increase the number of allowed streams +%% or other forms of flow control. Alternatively a flow command. Or both. +%% @todo A shutdown_reason command could be useful for the same reasons as Websocket. + +-spec terminate_proc(_, _, _) -> no_return(). + +terminate_proc(State, HandlerState, Reason) -> + handler_terminate(State, HandlerState, Reason), + %% @todo This is what should be done if shutdown_reason gets implemented. +% case Shutdown of +% normal -> exit(normal); +% _ -> exit({shutdown, Shutdown}) +% end. + exit(normal). + +handler_terminate(#state{handler=Handler, req=Req}, HandlerState, Reason) -> + cowboy_handler:terminate(Reason, Req, HandlerState, Handler). + +%% cowboy_stream callbacks. +%% +%% We shortcut stream handlers but still need to process some events +%% such as process exiting or termination. We implement the relevant +%% callbacks here. Note that as far as WebTransport is concerned, +%% receiving stream data here would be an error therefore the data +%% callback is not implemented. +%% +%% @todo Better type than map() for the cowboy_stream state. + +-spec info(cowboy_stream:streamid(), any(), State) + -> {cowboy_stream:commands(), State} when State::map(). + +info(StreamID, Msg, WTState=#{stream_state := StreamState0}) -> + {Commands, StreamState} = cowboy_stream:info(StreamID, Msg, StreamState0), + {Commands, WTState#{stream_state => StreamState}}. + +-spec terminate(cowboy_stream:streamid(), cowboy_stream:reason(), map()) + -> any().
+ +terminate(StreamID, Reason, #{stream_state := StreamState}) -> + cowboy_stream:terminate(StreamID, Reason, StreamState). diff --git a/test/compress_SUITE.erl b/test/compress_SUITE.erl index a25c427..9da9769 100644 --- a/test/compress_SUITE.erl +++ b/test/compress_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -23,12 +23,20 @@ %% ct. all() -> - [ + All = [ {group, http_compress}, {group, https_compress}, {group, h2_compress}, - {group, h2c_compress} - ]. + {group, h2c_compress}, + {group, h3_compress} + ], + %% Don't run HTTP/3 tests on Windows for now. + case os:type() of + {win32, _} -> + All -- [{group, h3_compress}]; + _ -> + All + end. groups() -> cowboy_test:common_groups(ct_helper:all(?MODULE)). @@ -37,7 +45,7 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). %% Routes. @@ -67,7 +75,7 @@ gzip_accept_encoding_malformed(Config) -> {200, Headers, _} = do_get("/reply/large", [{<<"accept-encoding">>, <<";">>}], Config), false = lists:keyfind(<<"content-encoding">>, 1, Headers), - false = lists:keyfind(<<"vary">>, 1, Headers), + {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), {_, <<"100000">>} = lists:keyfind(<<"content-length">>, 1, Headers), ok. @@ -76,7 +84,7 @@ gzip_accept_encoding_missing(Config) -> {200, Headers, _} = do_get("/reply/large", [], Config), false = lists:keyfind(<<"content-encoding">>, 1, Headers), - false = lists:keyfind(<<"vary">>, 1, Headers), + {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), {_, <<"100000">>} = lists:keyfind(<<"content-length">>, 1, Headers), ok. @@ -85,7 +93,7 @@ gzip_accept_encoding_no_gzip(Config) -> {200, Headers, _} = do_get("/reply/large", [{<<"accept-encoding">>, <<"compress">>}], Config), false = lists:keyfind(<<"content-encoding">>, 1, Headers), - false = lists:keyfind(<<"vary">>, 1, Headers), + {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), {_, <<"100000">>} = lists:keyfind(<<"content-length">>, 1, Headers), ok. @@ -94,7 +102,7 @@ gzip_accept_encoding_not_supported(Config) -> {200, Headers, _} = do_get("/reply/large", [{<<"accept-encoding">>, <<"application/gzip">>}], Config), false = lists:keyfind(<<"content-encoding">>, 1, Headers), - false = lists:keyfind(<<"vary">>, 1, Headers), + {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), {_, <<"100000">>} = lists:keyfind(<<"content-length">>, 1, Headers), ok. @@ -105,7 +113,18 @@ gzip_reply_content_encoding(Config) -> %% We set the content-encoding to compress; without actually compressing. {_, <<"compress">>} = lists:keyfind(<<"content-encoding">>, 1, Headers), %% The reply didn't include a vary header. - false = lists:keyfind(<<"vary">>, 1, Headers), + {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), + {_, <<"100000">>} = lists:keyfind(<<"content-length">>, 1, Headers), + ok. + +gzip_reply_etag(Config) -> + doc("Reply with etag header; get an uncompressed response."), + {200, Headers, _} = do_get("/reply/etag", + [{<<"accept-encoding">>, <<"gzip">>}], Config), + %% We set a strong etag. + {_, <<"\"STRONK\"">>} = lists:keyfind(<<"etag">>, 1, Headers), + %% The reply didn't include a vary header. 
+ {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), {_, <<"100000">>} = lists:keyfind(<<"content-length">>, 1, Headers), ok. @@ -125,7 +144,7 @@ gzip_reply_sendfile(Config) -> {200, Headers, Body} = do_get("/reply/sendfile", [{<<"accept-encoding">>, <<"gzip">>}], Config), false = lists:keyfind(<<"content-encoding">>, 1, Headers), - false = lists:keyfind(<<"vary">>, 1, Headers), + {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), ct:log("Body received:~n~p~n", [Body]), ok. @@ -134,7 +153,7 @@ gzip_reply_small_body(Config) -> {200, Headers, _} = do_get("/reply/small", [{<<"accept-encoding">>, <<"gzip">>}], Config), false = lists:keyfind(<<"content-encoding">>, 1, Headers), - false = lists:keyfind(<<"vary">>, 1, Headers), + {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), {_, <<"100">>} = lists:keyfind(<<"content-length">>, 1, Headers), ok. @@ -170,7 +189,16 @@ gzip_stream_reply_content_encoding(Config) -> {200, Headers, Body} = do_get("/stream_reply/content-encoding", [{<<"accept-encoding">>, <<"gzip">>}], Config), {_, <<"compress">>} = lists:keyfind(<<"content-encoding">>, 1, Headers), - false = lists:keyfind(<<"vary">>, 1, Headers), + {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), + 100000 = iolist_size(Body), + ok. + +gzip_stream_reply_etag(Config) -> + doc("Stream reply with etag header; get an uncompressed response."), + {200, Headers, Body} = do_get("/stream_reply/etag", + [{<<"accept-encoding">>, <<"gzip">>}], Config), + {_, <<"\"STRONK\"">>} = lists:keyfind(<<"etag">>, 1, Headers), + {_, <<"accept-encoding">>} = lists:keyfind(<<"vary">>, 1, Headers), 100000 = iolist_size(Body), ok. diff --git a/test/cowboy_ct_hook.erl b/test/cowboy_ct_hook.erl index 7d5a889..46e56a2 100644 --- a/test/cowboy_ct_hook.erl +++ b/test/cowboy_ct_hook.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2014-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/test/cowboy_test.erl b/test/cowboy_test.erl index 7ebe618..541e8f9 100644 --- a/test/cowboy_test.erl +++ b/test/cowboy_test.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2014-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -27,84 +27,149 @@ init_http(Ref, ProtoOpts, Config) -> init_https(Ref, ProtoOpts, Config) -> Opts = ct_helper:get_certs_from_ets(), - {ok, _} = cowboy:start_tls(Ref, Opts ++ [{port, 0}], ProtoOpts), + {ok, _} = cowboy:start_tls(Ref, Opts ++ [{port, 0}, {verify, verify_none}], ProtoOpts), Port = ranch:get_port(Ref), [{ref, Ref}, {type, ssl}, {protocol, http}, {port, Port}, {opts, Opts}|Config]. init_http2(Ref, ProtoOpts, Config) -> Opts = ct_helper:get_certs_from_ets(), - {ok, _} = cowboy:start_tls(Ref, Opts ++ [{port, 0}], ProtoOpts), + {ok, _} = cowboy:start_tls(Ref, Opts ++ [{port, 0}, {verify, verify_none}], ProtoOpts), Port = ranch:get_port(Ref), [{ref, Ref}, {type, ssl}, {protocol, http2}, {port, Port}, {opts, Opts}|Config]. +%% @todo This will probably require TransOpts as argument. +init_http3(Ref, ProtoOpts, Config) -> + %% @todo Quicer does not currently support non-file cert/key, + %% so we use quicer test certificates for now. 
+ %% @todo Quicer also does not support cacerts which means + %% we currently have no authentication based security. + DataDir = filename:dirname(filename:dirname(config(data_dir, Config))) + ++ "/rfc9114_SUITE_data", + TransOpts = #{ + socket_opts => [ + {certfile, DataDir ++ "/server.pem"}, + {keyfile, DataDir ++ "/server.key"} + ] + }, + {ok, Listener} = cowboy:start_quic(Ref, TransOpts, ProtoOpts), + {ok, {_, Port}} = quicer:sockname(Listener), + %% @todo Keep listener information around in a better place. + persistent_term:put({cowboy_test_quic, Ref}, Listener), + [{ref, Ref}, {type, quic}, {protocol, http3}, {port, Port}, {opts, TransOpts}|Config]. + +stop_group(Ref) -> + case persistent_term:get({cowboy_test_quic, Ref}, undefined) of + undefined -> + cowboy:stop_listener(Ref); + Listener -> + quicer:close_listener(Listener) + end. + %% Common group of listeners used by most suites. common_all() -> - [ + All = [ {group, http}, {group, https}, {group, h2}, {group, h2c}, + {group, h3}, {group, http_compress}, {group, https_compress}, {group, h2_compress}, - {group, h2c_compress} - ]. + {group, h2c_compress}, + {group, h3_compress} + ], + %% Don't run HTTP/3 tests on Windows for now. + case os:type() of + {win32, _} -> + All -- [{group, h3}, {group, h3_compress}]; + _ -> + All + end. common_groups(Tests) -> - Opts = case os:getenv("NO_PARALLEL") of - false -> [parallel]; - _ -> [] + Parallel = case os:getenv("NO_PARALLEL") of + false -> parallel; + _ -> no_parallel end, - [ + common_groups(Tests, Parallel). + +common_groups(Tests, Parallel) -> + Opts = case Parallel of + parallel -> [parallel]; + no_parallel -> [] + end, + Groups = [ {http, Opts, Tests}, {https, Opts, Tests}, {h2, Opts, Tests}, {h2c, Opts, Tests}, + {h3, Opts, Tests}, {http_compress, Opts, Tests}, {https_compress, Opts, Tests}, {h2_compress, Opts, Tests}, - {h2c_compress, Opts, Tests} - ]. + {h2c_compress, Opts, Tests}, + {h3_compress, Opts, Tests} + ], + %% Don't run HTTP/3 tests on Windows for now. + case os:type() of + {win32, _} -> + Groups -- [{h3, Opts, Tests}, {h3_compress, Opts, Tests}]; + _ -> + Groups + end. + +init_common_groups(Name, Config, Mod) -> + init_common_groups(Name, Config, Mod, #{}). 
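For reference, a suite typically wires these helpers together as follows; this is a minimal sketch mirroring compress_SUITE above (the suite module itself is hypothetical):

%% ct callbacks of a hypothetical suite using the helpers above.
all() -> cowboy_test:common_all().

groups() -> cowboy_test:common_groups(ct_helper:all(?MODULE)).

init_per_group(Name, Config) ->
    cowboy_test:init_common_groups(Name, Config, ?MODULE).

%% stop_group/1 stops either the Ranch listener or the QUIC listener,
%% depending on how the group was started.
end_per_group(Name, _) ->
    cowboy_test:stop_group(Name).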
-init_common_groups(Name = http, Config, Mod) -> - init_http(Name, #{ +init_common_groups(Name = http, Config, Mod, ProtoOpts) -> + init_http(Name, ProtoOpts#{ env => #{dispatch => Mod:init_dispatch(Config)} }, [{flavor, vanilla}|Config]); -init_common_groups(Name = https, Config, Mod) -> - init_https(Name, #{ +init_common_groups(Name = https, Config, Mod, ProtoOpts) -> + init_https(Name, ProtoOpts#{ env => #{dispatch => Mod:init_dispatch(Config)} }, [{flavor, vanilla}|Config]); -init_common_groups(Name = h2, Config, Mod) -> - init_http2(Name, #{ +init_common_groups(Name = h2, Config, Mod, ProtoOpts) -> + init_http2(Name, ProtoOpts#{ env => #{dispatch => Mod:init_dispatch(Config)} }, [{flavor, vanilla}|Config]); -init_common_groups(Name = h2c, Config, Mod) -> - Config1 = init_http(Name, #{ +init_common_groups(Name = h2c, Config, Mod, ProtoOpts) -> + Config1 = init_http(Name, ProtoOpts#{ env => #{dispatch => Mod:init_dispatch(Config)} }, [{flavor, vanilla}|Config]), lists:keyreplace(protocol, 1, Config1, {protocol, http2}); -init_common_groups(Name = http_compress, Config, Mod) -> - init_http(Name, #{ +init_common_groups(Name = h3, Config, Mod, ProtoOpts) -> + init_http3(Name, ProtoOpts#{ + env => #{dispatch => Mod:init_dispatch(Config)} + }, [{flavor, vanilla}|Config]); +init_common_groups(Name = http_compress, Config, Mod, ProtoOpts) -> + init_http(Name, ProtoOpts#{ env => #{dispatch => Mod:init_dispatch(Config)}, stream_handlers => [cowboy_compress_h, cowboy_stream_h] }, [{flavor, compress}|Config]); -init_common_groups(Name = https_compress, Config, Mod) -> - init_https(Name, #{ +init_common_groups(Name = https_compress, Config, Mod, ProtoOpts) -> + init_https(Name, ProtoOpts#{ env => #{dispatch => Mod:init_dispatch(Config)}, stream_handlers => [cowboy_compress_h, cowboy_stream_h] }, [{flavor, compress}|Config]); -init_common_groups(Name = h2_compress, Config, Mod) -> - init_http2(Name, #{ +init_common_groups(Name = h2_compress, Config, Mod, ProtoOpts) -> + init_http2(Name, ProtoOpts#{ env => #{dispatch => Mod:init_dispatch(Config)}, stream_handlers => [cowboy_compress_h, cowboy_stream_h] }, [{flavor, compress}|Config]); -init_common_groups(Name = h2c_compress, Config, Mod) -> - Config1 = init_http(Name, #{ +init_common_groups(Name = h2c_compress, Config, Mod, ProtoOpts) -> + Config1 = init_http(Name, ProtoOpts#{ env => #{dispatch => Mod:init_dispatch(Config)}, stream_handlers => [cowboy_compress_h, cowboy_stream_h] }, [{flavor, compress}|Config]), - lists:keyreplace(protocol, 1, Config1, {protocol, http2}). + lists:keyreplace(protocol, 1, Config1, {protocol, http2}); +init_common_groups(Name = h3_compress, Config, Mod, ProtoOpts) -> + init_http3(Name, ProtoOpts#{ + env => #{dispatch => Mod:init_dispatch(Config)}, + stream_handlers => [cowboy_compress_h, cowboy_stream_h] + }, [{flavor, compress}|Config]). %% Support functions for testing using Gun. @@ -112,10 +177,14 @@ gun_open(Config) -> gun_open(Config, #{}). gun_open(Config, Opts) -> + TlsOpts = case proplists:get_value(no_cert, Config, false) of + true -> [{verify, verify_none}]; + false -> ct_helper:get_certs_from_ets() %% @todo Wrong in current quicer. + end, {ok, ConnPid} = gun:open("localhost", config(port, Config), Opts#{ retry => 0, transport => config(type, Config), - tls_opts => proplists:get_value(tls_opts, Config, []), + tls_opts => TlsOpts, protocols => [config(protocol, Config)] }), ConnPid. @@ -153,6 +222,12 @@ raw_recv_head(Socket, Transport, Buffer) -> Buffer end. 
+raw_recv_rest({raw_client, _, _}, Length, Buffer) when Length =:= byte_size(Buffer) -> + Buffer; +raw_recv_rest({raw_client, Socket, Transport}, Length, Buffer) when Length > byte_size(Buffer) -> + {ok, Data} = Transport:recv(Socket, Length - byte_size(Buffer), 10000), + << Buffer/binary, Data/binary >>. + raw_recv({raw_client, Socket, Transport}, Length, Timeout) -> Transport:recv(Socket, Length, Timeout). diff --git a/test/decompress_SUITE.erl b/test/decompress_SUITE.erl new file mode 100644 index 0000000..f1eb13a --- /dev/null +++ b/test/decompress_SUITE.erl @@ -0,0 +1,421 @@ +%% Copyright (c) jdamanalo <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(decompress_SUITE). +-compile(export_all). +-compile(nowarn_export_all). + +-import(ct_helper, [config/2]). +-import(ct_helper, [doc/1]). +-import(cowboy_test, [gun_open/1]). + +%% ct. + +all() -> + cowboy_test:common_all(). + +groups() -> + cowboy_test:common_groups(ct_helper:all(?MODULE)). + +init_per_group(Name = http, Config) -> + cowboy_test:init_http(Name, init_plain_opts(Config), Config); +init_per_group(Name = https, Config) -> + cowboy_test:init_http(Name, init_plain_opts(Config), Config); +init_per_group(Name = h2, Config) -> + cowboy_test:init_http2(Name, init_plain_opts(Config), Config); +init_per_group(Name = h2c, Config) -> + Config1 = cowboy_test:init_http(Name, init_plain_opts(Config), Config), + lists:keyreplace(protocol, 1, Config1, {protocol, http2}); +init_per_group(Name = h3, Config) -> + cowboy_test:init_http3(Name, init_plain_opts(Config), Config); +init_per_group(Name = http_compress, Config) -> + cowboy_test:init_http(Name, init_compress_opts(Config), Config); +init_per_group(Name = https_compress, Config) -> + cowboy_test:init_http(Name, init_compress_opts(Config), Config); +init_per_group(Name = h2_compress, Config) -> + cowboy_test:init_http2(Name, init_compress_opts(Config), Config); +init_per_group(Name = h2c_compress, Config) -> + Config1 = cowboy_test:init_http(Name, init_compress_opts(Config), Config), + lists:keyreplace(protocol, 1, Config1, {protocol, http2}); +init_per_group(Name = h3_compress, Config) -> + cowboy_test:init_http3(Name, init_compress_opts(Config), Config). + +end_per_group(Name, _) -> + cowboy:stop_listener(Name). + +init_plain_opts(Config) -> + #{ + env => #{dispatch => cowboy_router:compile(init_routes(Config))}, + stream_handlers => [cowboy_decompress_h, cowboy_stream_h] + }. + +init_compress_opts(Config) -> + #{ + env => #{dispatch => cowboy_router:compile(init_routes(Config))}, + stream_handlers => [cowboy_decompress_h, cowboy_compress_h, cowboy_stream_h] + }. + +init_routes(_) -> + [{'_', [ + {"/echo/:what", decompress_h, echo}, + {"/test/:what", decompress_h, test} + ]}]. + +%% Internal. 
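Not part of the suite, but for context: enabling the new decompress stream handler in an application mirrors init_plain_opts/1 above. A minimal sketch, where the listener name, port, route and handler module are made up:

start_decompress_listener_example() ->
    Dispatch = cowboy_router:compile([
        {'_', [{"/upload", upload_h, []}]}
    ]),
    {ok, _} = cowboy:start_clear(example_http, [{port, 8080}], #{
        env => #{dispatch => Dispatch},
        %% cowboy_decompress_h is listed before cowboy_stream_h,
        %% as in init_plain_opts/1, so request bodies are inflated
        %% before they reach the handler.
        stream_handlers => [cowboy_decompress_h, cowboy_stream_h]
    }).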
+ +do_post(Path, ReqHeaders, Body, Config) -> + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, Path, ReqHeaders, Body), + {response, IsFin, Status, RespHeaders} = gun:await(ConnPid, Ref), + {ok, ResponseBody} = case IsFin of + nofin -> gun:await_body(ConnPid, Ref); + fin -> {ok, <<>>} + end, + gun:close(ConnPid), + {Status, RespHeaders, ResponseBody}. + +create_gzip_bomb() -> + Z = zlib:open(), + zlib:deflateInit(Z, 9, deflated, 31, 8, default), + %% 1000 chunks of 100000 zeroes (100MB). + Bomb = do_create_gzip_bomb(Z, 1000), + zlib:deflateEnd(Z), + zlib:close(Z), + iolist_to_binary(Bomb). + +do_create_gzip_bomb(Z, 0) -> + zlib:deflate(Z, << >>, finish); +do_create_gzip_bomb(Z, N) -> + Data = <<0:800000>>, + Deflate = zlib:deflate(Z, Data), + [Deflate | do_create_gzip_bomb(Z, N - 1)]. + +%% Tests. + +content_encoding_none(Config) -> + doc("Requests without content-encoding are processed normally."), + Body = <<"test">>, + {200, _, Body} = do_post("/echo/normal", [], Body, Config), + %% The content-encoding header would be propagated, + %% but there was no content-encoding header to propagate. + {200, _, <<"undefined">>} = do_post("/test/content-encoding", [], Body, Config), + %% The content_decoded list is empty. + {200, _, <<"[]">>} = do_post("/test/content-decoded", [], Body, Config), + ok. + +content_encoding_malformed(Config) -> + doc("Requests with a malformed content-encoding are processed " + "as if no content-encoding was sent."), + Body = <<"test">>, + {200, _, Body} = do_post("/echo/normal", + [{<<"content-encoding">>, <<";">>}], Body, Config), + %% The content-encoding header is propagated. + {200, _, <<";">>} = do_post("/test/content-encoding", + [{<<"content-encoding">>, <<";">>}], Body, Config), + %% The content_decoded list is empty. + {200, _, <<"[]">>} = do_post("/test/content-decoded", + [{<<"content-encoding">>, <<";">>}], Body, Config), + ok. + +content_encoding_not_supported(Config) -> + doc("Requests with an unsupported content-encoding are processed " + "as if no content-encoding was sent."), + Body = <<"test">>, + {200, _, Body} = do_post("/echo/normal", + [{<<"content-encoding">>, <<"compress">>}], Body, Config), + %% The content-encoding header is propagated. + {200, _, <<"compress">>} = do_post("/test/content-encoding", + [{<<"content-encoding">>, <<"compress">>}], Body, Config), + %% The content_decoded list is empty. + {200, _, <<"[]">>} = do_post("/test/content-decoded", + [{<<"content-encoding">>, <<"compress">>}], Body, Config), + ok. + +content_encoding_multiple(Config) -> + doc("Requests with multiple content-encoding values are processed " + "as if no content-encoding was sent."), + Body = <<"test">>, + {200, _, Body} = do_post("/echo/normal", + [{<<"content-encoding">>, <<"gzip, compress">>}], Body, Config), + %% The content-encoding header is propagated. + {200, _, <<"gzip, compress">>} = do_post("/test/content-encoding", + [{<<"content-encoding">>, <<"gzip, compress">>}], Body, Config), + %% The content_decoded list is empty. + {200, _, <<"[]">>} = do_post("/test/content-decoded", + [{<<"content-encoding">>, <<"gzip, compress">>}], Body, Config), + ok. + +decompress(Config) -> + doc("Requests with content-encoding set to gzip and gzipped data " + "are transparently decompressed."), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, _, Data} = do_post("/echo/normal", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + %% The content-encoding header is NOT propagated. 
+ {200, _, <<"undefined">>} = do_post("/test/content-encoding", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + %% The content_decoded list contains <<"gzip">>. + {200, _, <<"[<<\"gzip\">>]">>} = do_post("/test/content-decoded", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + ok. + +decompress_error(Config) -> + doc("Requests with content-encoding set to gzip but the data " + "cannot be decoded are rejected with a 400 Bad Request error."), + Body = <<"test">>, + {400, _, _} = do_post("/echo/normal", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + ok. + +decompress_stream(Config) -> + doc("Requests with content-encoding set to gzip and gzipped data " + "are transparently decompressed, even when the data is streamed."), + %% Handler read length 1KB. Compressing 3KB should be enough to trigger more. + Data = crypto:strong_rand_bytes(3000), + Body = zlib:gzip(Data), + Size = byte_size(Body), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/normal", + [{<<"content-encoding">>, <<"gzip">>}]), + gun:data(ConnPid, Ref, nofin, binary:part(Body, 0, Size div 2)), + timer:sleep(1000), + gun:data(ConnPid, Ref, fin, binary:part(Body, Size div 2, Size div 2 + Size rem 2)), + {response, IsFin, 200, _} = gun:await(ConnPid, Ref), + {ok, Data} = case IsFin of + nofin -> gun:await_body(ConnPid, Ref); + fin -> {ok, <<>>} + end, + gun:close(ConnPid), + %% The content-encoding header is NOT propagated. + ConnPid2 = gun_open(Config), + Ref2 = gun:post(ConnPid2, "/test/content-encoding", + [{<<"content-encoding">>, <<"gzip">>}]), + {response, nofin, 200, _} = gun:await(ConnPid2, Ref2), + {ok, <<"undefined">>} = gun:await_body(ConnPid2, Ref2), + gun:close(ConnPid2), + %% The content_decoded list contains <<"gzip">>. + ConnPid3 = gun_open(Config), + Ref3 = gun:post(ConnPid3, "/test/content-decoded", + [{<<"content-encoding">>, <<"gzip">>}]), + {response, nofin, 200, _} = gun:await(ConnPid3, Ref3), + {ok, <<"[<<\"gzip\">>]">>} = gun:await_body(ConnPid3, Ref3), + gun:close(ConnPid3). + +opts_decompress_enabled_false(Config0) -> + doc("Confirm that the decompress_enabled option can be set."), + Fun = case config(ref, Config0) of + HTTPS when HTTPS =:= https_compress; HTTPS =:= https -> init_https; + H2 when H2 =:= h2_compress; H2 =:= h2 -> init_http2; + _ -> init_http + end, + Config = cowboy_test:Fun(?FUNCTION_NAME, #{ + env => #{dispatch => cowboy_router:compile(init_routes(Config0))}, + stream_handlers => [cowboy_decompress_h, cowboy_stream_h], + decompress_enabled => false + }, Config0), + Data = <<"test">>, + Body = zlib:gzip(Data), + try + {200, Headers, Body} = do_post("/echo/normal", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + %% We do not set accept-encoding when we are disabled. + false = lists:keyfind(<<"accept-encoding">>, 1, Headers) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +set_options_decompress_enabled_false(Config) -> + doc("Confirm that the decompress_enabled option can be dynamically " + "set to false and the data received is not decompressed."), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, Headers, Body} = do_post("/echo/decompress_disable", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + %% We do not set accept-encoding when we are disabled. + false = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. 
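The /echo/decompress_disable route used above relies on the handler changing the stream handler options at runtime. A rough sketch of such a handler clause, assuming (as the test names suggest) that the decompress options are toggled through the usual set_options stream command:

%% Hypothetical handler clause: disable decompression for this request,
%% then read and echo the (still compressed) body.
init(Req0, decompress_disable = State) ->
    cowboy_req:cast({set_options, #{decompress_enabled => false}}, Req0),
    {ok, Body, Req} = cowboy_req:read_body(Req0),
    {ok, cowboy_req:reply(200, #{}, Body, Req), State}.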
+ +set_options_decompress_disable_in_the_middle(Config) -> + doc("Confirm that setting the decompress_enabled option dynamically " + "to false after starting to read the body does not disable decompression " + "and the data received is decompressed."), + Data = rand:bytes(1000000), + Body = zlib:gzip(Data), + %% Since we were not ignoring before starting to read, + %% we receive the entire body decompressed. + {200, Headers, Data} = do_post("/test/disable-in-the-middle", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + %% We do set accept-encoding when we are enabled, + %% even if an attempt to disable in the middle is ignored. + {_, _} = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. + +set_options_decompress_enable_in_the_middle(Config0) -> + doc("Confirm that setting the decompress_enabled option dynamically " + "to true after starting to read the body does not enable decompression " + "and the data received is not decompressed."), + Fun = case config(ref, Config0) of + HTTPS when HTTPS =:= https_compress; HTTPS =:= https -> init_https; + H2 when H2 =:= h2_compress; H2 =:= h2 -> init_http2; + _ -> init_http + end, + Config = cowboy_test:Fun(?FUNCTION_NAME, #{ + env => #{dispatch => cowboy_router:compile(init_routes(Config0))}, + stream_handlers => [cowboy_decompress_h, cowboy_stream_h], + decompress_enabled => false + }, Config0), + Data = rand:bytes(1000000), + Body = zlib:gzip(Data), + try + %% Since we were ignoring before starting to read, + %% we receive the entire body compressed. + {200, Headers, Body} = do_post("/test/enable-in-the-middle", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + %% We do not set accept-encoding when we are disabled, + %% even if an attempt to enable in the middle is ignored. + false = lists:keyfind(<<"accept-encoding">>, 1, Headers) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +opts_decompress_ratio_limit(Config0) -> + doc("Confirm that the decompress_ratio_limit option can be set."), + Fun = case config(ref, Config0) of + HTTPS when HTTPS =:= https_compress; HTTPS =:= https -> init_https; + H2 when H2 =:= h2_compress; H2 =:= h2 -> init_http2; + _ -> init_http + end, + Config = cowboy_test:Fun(?FUNCTION_NAME, #{ + env => #{dispatch => cowboy_router:compile(init_routes(Config0))}, + stream_handlers => [cowboy_decompress_h, cowboy_stream_h], + decompress_ratio_limit => 1 + }, Config0), + %% Data must be big enough for compression to be effective, + %% so that ratio_limit=1 will fail. + Data = <<0:800>>, + Body = zlib:gzip(Data), + try + {413, _, _} = do_post("/echo/normal", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +set_options_decompress_ratio_limit(Config) -> + doc("Confirm that the decompress_ratio_limit option can be dynamically set."), + %% Data must be big enough for compression to be effective, + %% so that ratio_limit=1 will fail. + Data = <<0:800>>, + Body = zlib:gzip(Data), + {413, _, _} = do_post("/echo/decompress_ratio_limit", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + ok. + +gzip_bomb(Config) -> + doc("Confirm that requests are rejected with a 413 Payload Too Large " + "error when the ratio limit is exceeded."), + Body = create_gzip_bomb(), + {413, _, _} = do_post("/echo/normal", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + ok. + +set_accept_encoding_response(Config) -> + doc("Header accept-encoding must be set on valid response command. 
" + "(RFC9110 12.5.3)"), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, Headers, Data} = do_post("/echo/normal", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + {_, <<"gzip">>} = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. + +set_accept_encoding_header(Config) -> + doc("Header accept-encoding must be set on valid header command. " + "(RFC9110 12.5.3)"), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, Headers, Data} = do_post("/test/header-command", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + {_, <<"gzip">>} = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. + +add_accept_encoding_header_valid(Config) -> + doc("Supported content codings must be added to the accept-encoding " + "header if it already exists. (RFC9110 12.5.3)"), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, Headers, Data} = do_post("/test/accept-identity", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + {_, <<"identity, gzip">>} = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. + +override_accept_encoding_header_invalid(Config) -> + doc("When the stream handler cannot parse the accept-encoding header " + "found in the response, it overrides it."), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, Headers, Data} = do_post("/test/invalid-header", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + {_, <<"gzip">>} = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. + +override_accept_encoding_excluded(Config) -> + doc("The stream handler must ensure that the content encodings " + "it supports are not marked as unsupported in response headers. " + "The stream handler enables gzip when explicitly excluded. " + "(RFC9110 12.5.3)"), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, Headers, Data} = do_post("/test/reject-explicit-header", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + {_, <<"identity;q=1, gzip;q=1">>} = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. + +%% *;q=0 will reject codings that are not listed. Supported codings +%% must always be enabled when the handler is used. +add_accept_encoding_excluded(Config) -> + doc("The stream handler must ensure that the content encodings " + "it supports are not marked as unsupported in response headers. " + "The stream handler enables gzip when implicitly excluded (*;q=0). " + "(RFC9110 12.5.3)"), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, Headers, Data} = do_post("/test/reject-implicit-header", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + {_, <<"gzip;q=1, identity;q=1, *;q=0">>} = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. + +no_override_accept_coding_set_explicit(Config) -> + doc("Confirm that accept-encoding is not overridden when the " + "content encodings it supports are explicitly set. " + "(RFC9110 12.5.3)"), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, Headers, Data} = do_post("/test/accept-explicit-header", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + {_, <<"identity, gzip;q=0.5">>} = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. + +no_override_accept_coding_set_implicit(Config) -> + doc("Confirm that accept-encoding is not overridden when the " + "content encodings it supports are implicitly set. 
" + "(RFC9110 12.5.3)"), + Data = <<"test">>, + Body = zlib:gzip(Data), + {200, Headers, Data} = do_post("/test/accept-implicit-header", + [{<<"content-encoding">>, <<"gzip">>}], Body, Config), + {_, <<"identity, *;q=0.5">>} = lists:keyfind(<<"accept-encoding">>, 1, Headers), + ok. diff --git a/test/draft_h3_webtransport_SUITE.erl b/test/draft_h3_webtransport_SUITE.erl new file mode 100644 index 0000000..05a6c17 --- /dev/null +++ b/test/draft_h3_webtransport_SUITE.erl @@ -0,0 +1,814 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(draft_h3_webtransport_SUITE). +-compile(export_all). +-compile(nowarn_export_all). + +-import(ct_helper, [config/2]). +-import(ct_helper, [doc/1]). +-import(rfc9114_SUITE, [do_wait_stream_aborted/1]). + +-ifdef(COWBOY_QUICER). + +-include_lib("quicer/include/quicer.hrl"). + +all() -> + [{group, enabled}]. + +groups() -> + Tests = ct_helper:all(?MODULE), + [{enabled, [], Tests}]. %% @todo Enable parallel when all is better. + +init_per_group(Name = enabled, Config) -> + cowboy_test:init_http3(Name, #{ + enable_connect_protocol => true, + h3_datagram => true, + enable_webtransport => true, %% For compatibility with draft-02. + wt_max_sessions => 10, + env => #{dispatch => cowboy_router:compile(init_routes(Config))} + }, Config). + +end_per_group(Name, _) -> + cowboy_test:stop_group(Name). + +init_routes(_) -> [ + {"localhost", [ + {"/wt", wt_echo_h, []} + ]} +]. + +%% Temporary. + +%% To start Chromium the command line is roughly: +%% chromium --ignore-certificate-errors-spki-list=LeLykt63i2FRAm+XO91yBoSjKfrXnAFygqe5xt0zgDA= --ignore-certificate-errors --user-data-dir=/tmp/chromium-wt --allow-insecure-localhost --webtransport-developer-mode --enable-quic https://googlechrome.github.io/samples/webtransport/client.html +%% +%% To find the SPKI the command is roughly: +%% openssl x509 -in ~/ninenines/cowboy/test/rfc9114_SUITE_data/server.pem -pubkey -noout | \ +%% openssl pkey -pubin -outform der | \ +%% openssl dgst -sha256 -binary | \ +%% openssl enc -base64 + +%run(Config) -> +% ct:pal("port ~p", [config(port, Config)]), +% timer:sleep(infinity). + +%% 3. Session Establishment + +%% 3.1. Establishing a WebTransport-Capable HTTP/3 Connection + +%% In order to indicate support for WebTransport, the server MUST send a SETTINGS_WT_MAX_SESSIONS value greater than "0" in its SETTINGS frame. (3.1) +%% @todo reject_session_disabled +%% @todo accept_session_below +%% @todo accept_session_equal +%% @todo reject_session_above + +%% The client MUST NOT send a WebTransport request until it has received the setting indicating WebTransport support from the server. 
(3.1) + +%% For draft versions of WebTransport only, the server MUST NOT process any incoming WebTransport requests until the client settings have been received, as the client may be using a version of the WebTransport extension that is different from the one used by the server. (3.1) + +%% Because WebTransport over HTTP/3 requires support for HTTP/3 datagrams and the Capsule Protocol, both the client and the server MUST indicate support for HTTP/3 datagrams by sending a SETTINGS_H3_DATAGRAM value set to 1 in their SETTINGS frame (see Section 2.1.1 of [HTTP-DATAGRAM]). (3.1) +%% @todo settings_h3_datagram_enabled + +%% WebTransport over HTTP/3 also requires support for QUIC datagrams. To indicate support, both the client and the server MUST send a max_datagram_frame_size transport parameter with a value greater than 0 (see Section 3 of [QUIC-DATAGRAM]). (3.1) +%% @todo quic_datagram_enabled (if size is too low the CONNECT stream can be used for capsules) + +%% Any WebTransport requests sent by the client without enabling QUIC and HTTP datagrams MUST be treated as malformed by the server, as described in Section 4.1.2 of [HTTP3]. (3.1) +%% @todo reject_h3_datagram_disabled +%% @todo reject_quic_datagram_disabled + +%% WebTransport over HTTP/3 relies on the RESET_STREAM_AT frame defined in [RESET-STREAM-AT]. To indicate support, both the client and the server MUST enable the extension as described in Section 3 of [RESET-STREAM-AT]. (3.1) +%% @todo reset_stream_at_enabled + +%% 3.2. Extended CONNECT in HTTP/3 + +%% [RFC8441] defines an extended CONNECT method in Section 4, enabled by the SETTINGS_ENABLE_CONNECT_PROTOCOL setting. That setting is defined for HTTP/3 by [RFC9220]. A server supporting WebTransport over HTTP/3 MUST send both the SETTINGS_WT_MAX_SESSIONS setting with a value greater than "0" and the SETTINGS_ENABLE_CONNECT_PROTOCOL setting with a value of "1". (3.2) +%% @todo settings_enable_connect_protocol_enabled +%% @todo reject_settings_enable_connect_protocol_disabled + +%% 3.3. Creating a New Session + +%% As WebTransport sessions are established over HTTP/3, they are identified using the https URI scheme ([HTTP], Section 4.2.2). (3.3) + +%% In order to create a new WebTransport session, a client can send an HTTP CONNECT request. The :protocol pseudo-header field ([RFC8441]) MUST be set to webtransport. The :scheme field MUST be https. Both the :authority and the :path value MUST be set; those fields indicate the desired WebTransport server. If the WebTransport session is coming from a browser client, an Origin header [RFC6454] MUST be provided within the request; otherwise, the header is OPTIONAL. (3.3) + +%% If it does not (have a WT server), it SHOULD reply with status code 404 (Section 15.5.5 of [HTTP]). (3.3) + +%% When the request contains the Origin header, the WebTransport server MUST verify the Origin header to ensure that the specified origin is allowed to access the server in question. If the verification fails, the WebTransport server SHOULD reply with status code 403 (Section 15.5.4 of [HTTP]). (3.3) + +accept_session_when_enabled(Config) -> + doc("Confirm that a WebTransport session can be established over HTTP/3. " + "(draft_webtrans_http3 3.3, RFC9220)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send Hello, get Hello back.
+ {ok, BidiStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(BidiStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "Hello">>), + {nofin, <<"Hello">>} = do_receive_data(BidiStreamRef), + ok. + +%% If the server accepts 0-RTT, the server MUST NOT reduce the limit of maximum open WebTransport sessions from the one negotiated during the previous session; such change would be deemed incompatible, and MUST result in a H3_SETTINGS_ERROR connection error. (3.3) + +%% The capsule-protocol header field Section 3.4 of [HTTP-DATAGRAM] is not required by WebTransport and can safely be ignored by WebTransport endpoints. (3.3) + +%% 3.4. Application Protocol Negotiation + +application_protocol_negotiation(Config) -> + doc("Applications can negotiate a protocol to use via WebTransport. " + "(draft_webtrans_http3 3.4)"), + %% Connect to the WebTransport server. + WTAvailableProtocols = cow_http_hd:wt_available_protocols([<<"foo">>, <<"bar">>]), + #{ + resp_headers := RespHeaders + } = do_webtransport_connect(Config, [{<<"wt-available-protocols">>, WTAvailableProtocols}]), + {<<"wt-protocol">>, WTProtocol} = lists:keyfind(<<"wt-protocol">>, 1, RespHeaders), + <<"foo">> = iolist_to_binary(cow_http_hd:parse_wt_protocol(WTProtocol)), + ok. + +%% Both WT-Available-Protocols and WT-Protocol are Structured Fields [RFC8941]. WT-Available-Protocols is a List of Tokens, and WT-Protocol is a Token. The token in the WT-Protocol response header field MUST be one of the tokens listed in WT-Available-Protocols of the request. (3.4) + +%% @todo 3.5 Prioritization + +%% 4. WebTransport Features + +%% The client MAY optimistically open unidirectional and bidirectional streams, as well as send datagrams, for a session that it has sent the CONNECT request for, even if it has not yet received the server's response to the request. (4) + +%% If at any point a session ID is received that cannot be a valid ID for a client-initiated bidirectional stream, the recipient MUST close the connection with an H3_ID_ERROR error code. (4) +%% @todo Open bidi with Session ID 0, then do the CONNECT request. + +%% 4.1. Unidirectional streams + +unidirectional_streams(Config) -> + doc("Both endpoints can open and use unidirectional streams. " + "(draft_webtrans_http3 4.1)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a unidi stream, send Hello with a Fin flag. + {ok, LocalStreamRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(LocalStreamRef, + <<1:2, 16#54:14, 0:2, SessionID:6, "Hello">>, + ?QUIC_SEND_FLAG_FIN), + %% Accept an identical unidi stream. + {unidi, RemoteStreamRef} = do_receive_new_stream(), + {nofin, <<1:2, 16#54:14, 0:2, SessionID:6>>} = do_receive_data(RemoteStreamRef), + {fin, <<"Hello">>} = do_receive_data(RemoteStreamRef), + ok. + +%% 4.2. Bidirectional Streams + +bidirectional_streams_client(Config) -> + doc("The WT client can open and use bidirectional streams. " + "(draft_webtrans_http3 4.2)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send Hello, get Hello back. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "Hello">>), + {nofin, <<"Hello">>} = do_receive_data(LocalStreamRef), + ok. 
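The raw binaries used in these tests, such as <<1:2, 16#41:14, 0:2, SessionID:6, "Hello">>, hand-roll the WebTransport stream header: a QUIC variable-length integer for the signal value (16#41 on bidirectional, 16#54 on unidirectional streams) followed by a variable-length integer carrying the session ID. A small sketch building the same prefix with cowlib's encoder instead of raw bit syntax; the guard keeps it equivalent to the one-byte session ID form used above (wt_stream_header/2 is a hypothetical helper):

wt_stream_header(bidi, SessionID) when SessionID < 64 ->
    %% cow_http3:encode_int/1 picks the shortest varint form:
    %% two bytes for 16#41, one byte for SessionID < 64.
    iolist_to_binary([cow_http3:encode_int(16#41), cow_http3:encode_int(SessionID)]);
wt_stream_header(unidi, SessionID) when SessionID < 64 ->
    iolist_to_binary([cow_http3:encode_int(16#54), cow_http3:encode_int(SessionID)]).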
+ +bidirectional_streams_server(Config) -> + doc("The WT server can open and use bidirectional streams. " + "(draft_webtrans_http3 4.2)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction + %% to make the server create another bidi stream. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "TEST:open_bidi">>), + %% Accept the bidi stream and receive the data. + {bidi, RemoteStreamRef} = do_receive_new_stream(), + {nofin, <<1:2, 16#41:14, 0:2, SessionID:6>>} = do_receive_data(RemoteStreamRef), + {ok, _} = quicer:send(RemoteStreamRef, <<"Hello">>, + ?QUIC_SEND_FLAG_FIN), + {fin, <<"Hello">>} = do_receive_data(RemoteStreamRef), + ok. + +%% Endpoints MUST NOT send WT_STREAM as a frame type on HTTP/3 streams other than the very first bytes of a request stream. Receiving this frame type in any other circumstances MUST be treated as a connection error of type H3_FRAME_ERROR. (4.2) + +%% 4.3. Resetting Data Streams + +%% A WebTransport endpoint may send a RESET_STREAM or a STOP_SENDING frame for a WebTransport data stream. Those signals are propagated by the WebTransport implementation to the application. (4.3) + +%% A WebTransport application SHALL provide an error code for those operations. (4.3) + +%% WebTransport implementations MUST use the RESET_STREAM_AT frame [RESET-STREAM-AT] with a Reliable Size set to at least the size of the WebTransport header when resetting a WebTransport data stream. This ensures that the ID field associating the data stream with a WebTransport session is always delivered. (4.3) + +%% WebTransport implementations SHALL forward the error code for a stream associated with a known session to the application that owns that session (4.3) + +%% 4.4. Datagrams + +datagrams(Config) -> + doc("Both endpoints can send and receive datagrams. (draft_webtrans_http3 4.4)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + session_id := SessionID + } = do_webtransport_connect(Config), + QuarterID = SessionID div 4, + %% Send a Hello datagram. + {ok, _} = quicer:send_dgram(Conn, <<0:2, QuarterID:6, "Hello">>), + %% Receive a Hello datagram back. + {datagram, SessionID, <<"Hello">>} = do_receive_datagram(Conn), + ok. + +%% @todo datagrams_via_capsule? + +%% 4.5. Buffering Incoming Streams and Datagrams + +%% To handle this case (out of order stream_open/CONNECT), WebTransport endpoints SHOULD buffer streams and datagrams until those can be associated with an established session. (4.5) + +%% To avoid resource exhaustion, the endpoints MUST limit the number of buffered streams and datagrams. When the number of buffered streams is exceeded, a stream SHALL be closed by sending a RESET_STREAM and/or STOP_SENDING with the WT_BUFFERED_STREAM_REJECTED error code. When the number of buffered datagrams is exceeded, a datagram SHALL be dropped. It is up to an implementation to choose what stream or datagram to discard. (4.5) + +%% 4.6. Interaction with HTTP/3 GOAWAY frame + +%% A client receiving GOAWAY cannot initiate CONNECT requests for new WebTransport sessions on that HTTP/3 connection; it must open a new HTTP/3 connection to initiate new WebTransport sessions with the same peer. (4.6) + +%% An HTTP/3 GOAWAY frame is also a signal to applications to initiate shutdown for all WebTransport sessions. 
(4.6) + +%% @todo Currently receipt of a GOAWAY frame immediately ends the connection. +%% We want to allow WT sessions to gracefully shut down before that. +%goaway_client(Config) -> +% doc("The HTTP/3 client can initiate the close of all WT sessions " +% "by sending a GOAWAY frame. (draft_webtrans_http3 4.6)"), +% %% Connect to the WebTransport server. +% #{ +% conn := Conn, +% connect_stream_ref := ConnectStreamRef, +% session_id := SessionID +% } = do_webtransport_connect(Config), +% %% Open a control stream and send a GOAWAY frame. +% {ok, ControlRef} = quicer:start_stream(Conn, +% #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), +% {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), +% {ok, _} = quicer:send(ControlRef, [ +% <<0>>, %% CONTROL stream. +% SettingsBin, +% <<7>>, %% GOAWAY frame. +% cow_http3:encode_int(1), +% cow_http3:encode_int(0) +% ]), +% %% Receive a datagram indicating processing by the WT handler. +% {datagram, SessionID, <<"TEST:close_initiated">>} = do_receive_datagram(Conn), +% ok. + +wt_drain_session_client(Config) -> + doc("The WT client can initiate the close of a single session. " + "(draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Send the WT_DRAIN_SESSION capsule on the CONNECT stream. + {ok, _} = quicer:send(ConnectStreamRef, cow_capsule:wt_drain_session()), + %% Receive a datagram indicating processing by the WT handler. + {datagram, SessionID, <<"TEST:close_initiated">>} = do_receive_datagram(Conn), + ok. + +wt_drain_session_server(Config) -> + doc("The WT server can initiate the close of a single session. " + "(draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction to make it initiate the close. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "TEST:initiate_close">>), + %% Receive the WT_DRAIN_SESSION capsule on the CONNECT stream. + DrainWTSessionCapsule = cow_capsule:wt_drain_session(), + {nofin, DrainWTSessionCapsule} = do_receive_data(ConnectStreamRef), + ok. + +wt_drain_session_continue_client(Config) -> + doc("After the WT client has initiated the close of the session, " + "both client and server can continue using the session and " + "open new streams. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Send the WT_DRAIN_SESSION capsule on the CONNECT stream. + {ok, _} = quicer:send(ConnectStreamRef, cow_capsule:wt_drain_session()), + %% Receive a datagram indicating processing by the WT handler. + {datagram, SessionID, <<"TEST:close_initiated">>} = do_receive_datagram(Conn), + %% Create a new bidi stream, send Hello, get Hello back. + {ok, ContinueStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(ContinueStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "Hello">>), + {nofin, <<"Hello">>} = do_receive_data(ContinueStreamRef), + ok. 
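On the server side, the special "TEST:..." instructions sent from the client in these tests are answered by the wt_echo_h test handler with WebTransport commands. A rough sketch of what such clauses can look like, using the commands accepted by cowboy_webtransport earlier in this diff; the {stream_data, ...} event shape is an assumption for illustration:

webtransport_handle({stream_data, _StreamID, _IsFin, <<"TEST:initiate_close">>}, State) ->
    %% initiate_close sends a WT_DRAIN_SESSION capsule; the session stays usable.
    {[initiate_close], State};
webtransport_handle({stream_data, _StreamID, _IsFin, <<"TEST:close">>}, State) ->
    %% close (or {close, Code} / {close, Code, Msg}) sends WT_CLOSE_SESSION.
    {[close], State};
webtransport_handle({stream_data, StreamID, _IsFin, Data}, State) ->
    %% Default echo behaviour.
    {[{send, StreamID, Data}], State}.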
+ +wt_drain_session_continue_server(Config) -> + doc("After the WT server has initiated the close of the session, " + "both client and server can continue using the session and " + "open new streams. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction to make it initiate the close. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "TEST:initiate_close">>), + %% Receive the WT_DRAIN_SESSION capsule on the CONNECT stream. + DrainWTSessionCapsule = cow_capsule:wt_drain_session(), + {nofin, DrainWTSessionCapsule} = do_receive_data(ConnectStreamRef), + %% Create a new bidi stream, send Hello, get Hello back. + {ok, ContinueStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(ContinueStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "Hello">>), + {nofin, <<"Hello">>} = do_receive_data(ContinueStreamRef), + ok. + +%% @todo 4.7. Use of Keying Material Exporters + +%% 5. Flow Control + +%% 5.1. Limiting the Number of Simultaneous Sessions + +%% This document defines a SETTINGS_WT_MAX_SESSIONS parameter that allows the server to limit the maximum number of concurrent WebTransport sessions on a single HTTP/3 connection. The client MUST NOT open more simultaneous sessions than indicated in the server SETTINGS parameter. The server MUST NOT close the connection if the client opens sessions exceeding this limit, as the client and the server do not have a consistent view of how many sessions are open due to the asynchronous nature of the protocol; instead, it MUST reset all of the CONNECT streams it is not willing to process with the H3_REQUEST_REJECTED status defined in [HTTP3]. (5.1) + +%% 5.2. Limiting the Number of Streams Within a Session + +%% The WT_MAX_STREAMS capsule (Section 5.6.1) establishes a limit on the number of streams within a WebTransport session. (5.2) + +%% Note that the CONNECT stream for the session is not included in either the bidirectional or the unidirectional stream limits (5.2) + +%% The session-level stream limit applies in addition to the QUIC MAX_STREAMS frame, which provides a connection-level stream limit. New streams can only be created within the session if both the stream- and the connection-level limit permit (5.2) + +%% The WT_STREAMS_BLOCKED capsule (Section 5.7) can be sent to indicate that an endpoint was unable to create a stream due to the session-level stream limit. (5.2) + +%% Note that enforcing this limit requires reliable resets for stream headers so that both endpoints can agree on the number of streams that are open. (5.2) + +%% 5.3. Data Limits + +%% The WT_MAX_DATA capsule (Section 5.8) establishes a limit on the amount of data that can be sent within a WebTransport session. This limit counts all data that is sent on streams of the corresponding type, excluding the stream header (see Section 4.1 and Section 4.2). (5.3) + +%% Implementing WT_MAX_DATA requires that the QUIC stack provide the WebTransport implementation with information about the final size of streams; see { {Section 4.5 of !RFC9000}}. This allows both endpoints to agree on how much data was consumed by that stream, although the stream header exclusion above applies. 
(5.3) + +%% The WT_DATA_BLOCKED capsule (Section 5.9) can be sent to indicate that an endpoint was unable to send data due to a limit set by the WT_MAX_DATA capsule. (5.3) + +%% The WT_MAX_STREAM_DATA and WT_STREAM_DATA_BLOCKED capsules (Part XX of [I-D.ietf-webtrans-http2]) are not used and so are prohibited. Endpoints MUST treat receipt of a WT_MAX_STREAM_DATA or a WT_STREAM_DATA_BLOCKED capsule as a session error. (5.3) + +%% 5.4. Flow Control and Intermediaries + +%% In practice, an intermediary that translates flow control signals between similar WebTransport protocols, such as between two HTTP/3 connections, can often simply reexpress the same limits received on one connection directly on the other connection. (5.4) + +%% 5.5. Flow Control SETTINGS + +%% WT_MAX_STREAMS via SETTINGS_WT_INITIAL_MAX_STREAMS_UNI and SETTINGS_WT_INITIAL_MAX_STREAMS_BIDI (5.5) + +%% WT_MAX_DATA via SETTINGS_WT_INITIAL_MAX_DATA (5.5) + +%% 5.6. Flow Control Capsules + +%% 5.6.1. WT_MAX_STREAMS Capsule + +%% An HTTP capsule [HTTP-DATAGRAM] called WT_MAX_STREAMS is introduced to inform the peer of the cumulative number of streams of a given type it is permitted to open. A WT_MAX_STREAMS capsule with a type of 0x190B4D3F applies to bidirectional streams, and a WT_MAX_STREAMS capsule with a type of 0x190B4D40 applies to unidirectional streams. (5.6.1) + +%% Note that, because Maximum Streams is a cumulative value representing the total allowed number of streams, including previously closed streams, endpoints repeatedly send new WT_MAX_STREAMS capsules with increasing Maximum Streams values as streams are opened. (5.6.1) + +%% Maximum Streams: A count of the cumulative number of streams of the corresponding type that can be opened over the lifetime of the session. This value cannot exceed 260, as it is not possible to encode stream IDs larger than 262-1. (5.6.1) + +%% An endpoint MUST NOT open more streams than permitted by the current stream limit set by its peer. (5.6.1) + +%% Note that this limit includes streams that have been closed as well as those that are open. (5.6.1) + +%% Initial values for these limits MAY be communicated by sending non-zero values for SETTINGS_WT_INITIAL_MAX_STREAMS_UNI and SETTINGS_WT_INITIAL_MAX_STREAMS_BIDI. (5.6.1) + +%% 5.7. WT_STREAMS_BLOCKED Capsule + +%% A sender SHOULD send a WT_STREAMS_BLOCKED capsule (type=0x190B4D43 for bidi or 0x190B4D44 for unidi) when it wishes to open a stream but is unable to do so due to the maximum stream limit set by its peer. (5.7) + +%% 5.8. WT_MAX_DATA Capsule + +%% An HTTP capsule [HTTP-DATAGRAM] called WT_MAX_DATA (type=0x190B4D3D) is introduced to inform the peer of the maximum amount of data that can be sent on the WebTransport session as a whole. (5.8) + +%% This limit counts all data that is sent on streams of the corresponding type, excluding the stream header (see Section 4.1 and Section 4.2). Implementing WT_MAX_DATA requires that the QUIC stack provide the WebTransport implementation with information about the final size of streams; see Section 4.5 of [RFC9000]. (5.8) + +%% All data sent in WT_STREAM capsules counts toward this limit. The sum of the lengths of Stream Data fields in WT_STREAM capsules MUST NOT exceed the value advertised by a receiver. (5.8) + +%% The initial value for this limit MAY be communicated by sending a non-zero value for SETTINGS_WT_INITIAL_MAX_DATA. (5.8) + +%% 5.9. 
WT_DATA_BLOCKED Capsule + +%% A sender SHOULD send a WT_DATA_BLOCKED capsule (type=0x190B4D41) when it wishes to send data but is unable to do so due to WebTransport session-level flow control. (5.9) + +%% WT_DATA_BLOCKED capsules can be used as input to tuning of flow control algorithms. (5.9) + +%% 6. Session Termination + +%% A WebTransport session over HTTP/3 is considered terminated when either of the following conditions is met: +%% * the CONNECT stream is closed, either cleanly or abruptly, on either side; or +%% * a WT_CLOSE_SESSION capsule is either sent or received. +%% (6) + +wt_close_session_client(Config) -> + doc("The WT client can close a single session. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + connect_stream_ref := ConnectStreamRef + } = do_webtransport_connect(Config), + %% Send the WT_CLOSE_SESSION capsule on the CONNECT stream. + {ok, _} = quicer:send(ConnectStreamRef, + cow_capsule:wt_close_session(0, <<>>), + ?QUIC_SEND_FLAG_FIN), + %% Normally we should also stop reading but in order to detect + %% that the server stops the stream we must not otherwise the + %% stream will be de facto closed on our end. + %% + %% The recipient must close or reset the stream in response. + receive + {quic, stream_closed, ConnectStreamRef, _} -> + ok + after 1000 -> + error({timeout, waiting_for_stream_closed}) + end. + +wt_close_session_server(Config) -> + doc("The WT server can close a single session. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction to make it initiate the close. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "TEST:close">>), + %% Receive the WT_CLOSE_SESSION capsule on the CONNECT stream. + CloseWTSessionCapsule = cow_capsule:wt_close_session(0, <<>>), + {fin, CloseWTSessionCapsule} = do_receive_data(ConnectStreamRef), + ok. + +wt_session_gone_client(Config) -> + doc("Upon learning that the session has been terminated, " + "the WT server must reset associated streams with the " + "WEBTRANSPORT_SESSION_GONE error code. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a unidi stream. + {ok, LocalUnidiStreamRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(LocalUnidiStreamRef, + <<1:2, 16#54:14, 0:2, SessionID:6, "Hello">>), + %% Accept an identical unidi stream. + {unidi, RemoteUnidiStreamRef} = do_receive_new_stream(), + {nofin, <<1:2, 16#54:14, 0:2, SessionID:6>>} = do_receive_data(RemoteUnidiStreamRef), + {nofin, <<"Hello">>} = do_receive_data(RemoteUnidiStreamRef), + %% Create a bidi stream, send a special instruction + %% to make the server create another bidi stream. + {ok, LocalBidiStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(LocalBidiStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "TEST:open_bidi">>), + %% Accept the bidi stream and receive the data. 
+ {bidi, RemoteBidiStreamRef} = do_receive_new_stream(), + {nofin, <<1:2, 16#41:14, 0:2, SessionID:6>>} = do_receive_data(RemoteBidiStreamRef), + {ok, _} = quicer:send(RemoteBidiStreamRef, <<"Hello">>), + {nofin, <<"Hello">>} = do_receive_data(RemoteBidiStreamRef), + %% Send the WT_CLOSE_SESSION capsule on the CONNECT stream. + {ok, _} = quicer:send(ConnectStreamRef, + cow_capsule:wt_close_session(0, <<>>), + ?QUIC_SEND_FLAG_FIN), + %% All streams from that WT session have been aborted. + #{reason := wt_session_gone} = do_wait_stream_aborted(LocalUnidiStreamRef), + #{reason := wt_session_gone} = do_wait_stream_aborted(RemoteUnidiStreamRef), + #{reason := wt_session_gone} = do_wait_stream_aborted(LocalBidiStreamRef), + #{reason := wt_session_gone} = do_wait_stream_aborted(RemoteBidiStreamRef), + ok. + +wt_session_gone_server(Config) -> + doc("After the session has been terminated by the WT server, " + "the WT server must reset associated streams with the " + "WT_SESSION_GONE error code. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a unidi stream. + {ok, LocalUnidiStreamRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(LocalUnidiStreamRef, + <<1:2, 16#54:14, 0:2, SessionID:6, "Hello">>), + %% Accept an identical unidi stream. + {unidi, RemoteUnidiStreamRef} = do_receive_new_stream(), + {nofin, <<1:2, 16#54:14, 0:2, SessionID:6>>} = do_receive_data(RemoteUnidiStreamRef), + {nofin, <<"Hello">>} = do_receive_data(RemoteUnidiStreamRef), + %% Create a bidi stream, send a special instruction + %% to make the server create another bidi stream. + {ok, LocalBidiStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(LocalBidiStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, "TEST:open_bidi">>), + %% Accept the bidi stream and receive the data. + {bidi, RemoteBidiStreamRef} = do_receive_new_stream(), + {nofin, <<1:2, 16#41:14, 0:2, SessionID:6>>} = do_receive_data(RemoteBidiStreamRef), + {ok, _} = quicer:send(RemoteBidiStreamRef, <<"Hello">>), + {nofin, <<"Hello">>} = do_receive_data(RemoteBidiStreamRef), + + %% Send a special instruction to make the server initiate the close. + {ok, _} = quicer:send(LocalBidiStreamRef, <<"TEST:close">>), + %% Receive the WT_CLOSE_SESSION capsule on the CONNECT stream. + CloseWTSessionCapsule = cow_capsule:wt_close_session(0, <<>>), + {fin, CloseWTSessionCapsule} = do_receive_data(ConnectStreamRef), + %% All streams from that WT session have been aborted. + #{reason := wt_session_gone} = do_wait_stream_aborted(LocalUnidiStreamRef), + #{reason := wt_session_gone} = do_wait_stream_aborted(RemoteUnidiStreamRef), + #{reason := wt_session_gone} = do_wait_stream_aborted(LocalBidiStreamRef), + #{reason := wt_session_gone} = do_wait_stream_aborted(RemoteBidiStreamRef), + ok. + +%% Application Error Message: A UTF-8 encoded error message string provided by the application closing the session. The message takes up the remainder of the capsule, and its length MUST NOT exceed 1024 bytes. (6) +%% @todo What if it's larger? + +wt_close_session_app_code_msg_client(Config) -> + doc("The WT client can close a single session with an application error code " + "and an application error message. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. 
+ #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction to make it propagate events. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + EventPidBin = term_to_binary(self()), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, + "TEST:event_pid:", EventPidBin/binary>>), + %% Send the WT_CLOSE_SESSION capsule on the CONNECT stream. + {ok, _} = quicer:send(ConnectStreamRef, + cow_capsule:wt_close_session(17, <<"seventeen">>), + ?QUIC_SEND_FLAG_FIN), + %% @todo Stop reading from the CONNECt stream too. (STOP_SENDING) + %% Receive the terminate event from the WT handler. + receive + {'$wt_echo_h', terminate, {closed, 17, <<"seventeen">>}, _, _} -> + ok + after 1000 -> + error({timeout, waiting_for_terminate_event}) + end. + +wt_close_session_app_code_server(Config) -> + doc("The WT server can close a single session with an application error code. " + "(draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction to make it initiate the close. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, + "TEST:close_app_code">>), + %% Receive the WT_CLOSE_SESSION capsule on the CONNECT stream. + CloseWTSessionCapsule = cow_capsule:wt_close_session(1234567890, <<>>), + {fin, CloseWTSessionCapsule} = do_receive_data(ConnectStreamRef), + ok. + +wt_close_session_app_code_msg_server(Config) -> + doc("The WT server can close a single session with an application error code " + "and an application error message. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction to make it initiate the close. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, + "TEST:close_app_code_msg">>), + %% Receive the WT_CLOSE_SESSION capsule on the CONNECT stream. + CloseWTSessionCapsule = iolist_to_binary(cow_capsule:wt_close_session(1234567890, + <<"onetwothreefourfivesixseveneightnineten">>)), + {fin, CloseWTSessionCapsule} = do_receive_data(ConnectStreamRef), + ok. + +%% An endpoint that sends a WT_CLOSE_SESSION capsule MUST immediately send a FIN. The endpoint MAY send a STOP_SENDING to indicate it is no longer reading from the CONNECT stream. The recipient MUST either close or reset the stream in response. (6) +%% @todo wt_close_session_server_fin +%% @todo The part about close/reset should be tested in wt_close_session_client. + +%% If any additional stream data is received on the CONNECT stream after receiving a WT_CLOSE_SESSION capsule, the stream MUST be reset with code H3_MESSAGE_ERROR. (6) +%% @todo wt_close_session_followed_by_data + +connect_stream_closed_cleanly_fin(Config) -> + doc("The WT client closing the CONNECT stream cleanly " + "is equivalent to a capsule with an application error code of 0 " + "and an empty error string. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. 
+ #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction to make it propagate events. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + EventPidBin = term_to_binary(self()), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, + "TEST:event_pid:", EventPidBin/binary>>), + {nofin, <<"event_pid_received">>} = do_receive_data(LocalStreamRef), + %% Cleanly terminate the CONNECT stream. + {ok, _} = quicer:send(ConnectStreamRef, <<>>, ?QUIC_SEND_FLAG_FIN), + %% Receive the terminate event from the WT handler. + receive + {'$wt_echo_h', terminate, {closed, 0, <<>>}, _, _} -> + ok + after 1000 -> + error({timeout, waiting_for_terminate_event}) + end. + +connect_stream_closed_cleanly_shutdown(Config) -> + doc("The WT client closing the CONNECT stream cleanly " + "is equivalent to a capsule with an application error code of 0 " + "and an empty error string. (draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction to make it propagate events. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + EventPidBin = term_to_binary(self()), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, + "TEST:event_pid:", EventPidBin/binary>>), + {nofin, <<"event_pid_received">>} = do_receive_data(LocalStreamRef), + %% Cleanly terminate the CONNECT stream. + _ = quicer:shutdown_stream(ConnectStreamRef), + %% Receive the terminate event from the WT handler. + receive + {'$wt_echo_h', terminate, {closed, 0, <<>>}, _, _} -> + ok + after 1000 -> + error({timeout, waiting_for_terminate_event}) + end. + +connect_stream_closed_abruptly(Config) -> + doc("The WT client may close the CONNECT stream abruptly. " + "(draft_webtrans_http3 4.6)"), + %% Connect to the WebTransport server. + #{ + conn := Conn, + connect_stream_ref := ConnectStreamRef, + session_id := SessionID + } = do_webtransport_connect(Config), + %% Create a bidi stream, send a special instruction to make it propagate events. + {ok, LocalStreamRef} = quicer:start_stream(Conn, #{}), + EventPidBin = term_to_binary(self()), + {ok, _} = quicer:send(LocalStreamRef, <<1:2, 16#41:14, 0:2, SessionID:6, + "TEST:event_pid:", EventPidBin/binary>>), + {nofin, <<"event_pid_received">>} = do_receive_data(LocalStreamRef), + %% Abruptly terminate the CONNECT stream. + _ = quicer:shutdown_stream(ConnectStreamRef, ?QUIC_STREAM_SHUTDOWN_FLAG_ABORT, + 0, infinity), + %% Receive the terminate event from the WT handler. + receive + %% @todo It would be good to forward a stream error as well + %% so that a WT error can be sent, but I have been unsuccessful. + {'$wt_echo_h', terminate, closed_abruptly, _, _} -> + ok + after 1000 -> + error({timeout, waiting_for_terminate_event}) + end. + +%% @todo This one is about gracefully closing HTTP/3 connection with WT sessions. +%% the endpoint SHOULD wait until all CONNECT streams have been closed by the peer before sending the CONNECTION_CLOSE (6) + +%% Helpers. + +do_webtransport_connect(Config) -> + do_webtransport_connect(Config, []). + +do_webtransport_connect(Config, ExtraHeaders) -> + %% Connect to server. 
+ #{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config, #{ + peer_unidi_stream_count => 100, + datagram_send_enabled => 1, + datagram_receive_enabled => 1 + }), + %% Confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1. + #{enable_connect_protocol := true} = Settings, + %% Confirm that SETTINGS_WT_MAX_SESSIONS >= 1. + #{wt_max_sessions := WTMaxSessions} = Settings, + true = WTMaxSessions >= 1, + %% Confirm that SETTINGS_H3_DATAGRAM = 1. + #{h3_datagram := true} = Settings, + %% Confirm that QUIC's max_datagram_size > 0. + receive {quic, dgram_state_changed, Conn, DatagramState} -> + #{ + dgram_max_len := DatagramMaxLen, + dgram_send_enabled := DatagramSendEnabled + } = DatagramState, + true = DatagramMaxLen > 0, + true = DatagramSendEnabled, + ok + after 5000 -> + error({timeout, waiting_for_datagram_state_change}) + end, + %% Send a CONNECT :protocol request to upgrade the stream to Websocket. + {ok, ConnectStreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"CONNECT">>}, + {<<":protocol">>, <<"webtransport">>}, + {<<":scheme">>, <<"https">>}, + {<<":path">>, <<"/wt">>}, + {<<":authority">>, <<"localhost">>}, %% @todo Correct port number. + {<<"origin">>, <<"https://localhost">>} + |ExtraHeaders], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(ConnectStreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedRequest)), + EncodedRequest + ]), + %% Receive a 200 response. + {nofin, Data} = do_receive_data(ConnectStreamRef), + {HLenEnc, HLenBits} = rfc9114_SUITE:do_guess_int_encoding(Data), + << + 1, %% HEADERS frame. + HLenEnc:2, HLen:HLenBits, + EncodedResponse:HLen/bytes + >> = Data, + {ok, DecodedResponse, _DecData, _DecSt} + = cow_qpack:decode_field_section(EncodedResponse, 0, cow_qpack:init(decoder)), + #{<<":status">> := <<"200">>} = maps:from_list(DecodedResponse), + %% Retrieve the Session ID. + {ok, SessionID} = quicer:get_stream_id(ConnectStreamRef), + %% Accept QPACK streams to avoid conflicts with unidi streams from tests. + Unidi1 = rfc9114_SUITE:do_accept_qpack_stream(Conn), + Unidi2 = rfc9114_SUITE:do_accept_qpack_stream(Conn), + %% Done. + #{ + conn => Conn, + connect_stream_ref => ConnectStreamRef, + session_id => SessionID, + resp_headers => DecodedResponse, + enc_or_dec1 => Unidi1, + enc_or_dec2 => Unidi2 + }. + +do_receive_new_stream() -> + receive + {quic, new_stream, StreamRef, #{flags := Flags}} -> + ok = quicer:setopt(StreamRef, active, true), + case quicer:is_unidirectional(Flags) of + true -> {unidi, StreamRef}; + false -> {bidi, StreamRef} + end + after 5000 -> + error({timeout, waiting_for_stream}) + end. + +do_receive_data(StreamRef) -> + receive {quic, Data, StreamRef, #{flags := Flags}} -> + IsFin = case Flags band ?QUIC_RECEIVE_FLAG_FIN of + ?QUIC_RECEIVE_FLAG_FIN -> fin; + _ -> nofin + end, + {IsFin, Data} + after 5000 -> + error({timeout, waiting_for_data}) + end. + +do_receive_datagram(Conn) -> + receive {quic, <<0:2, QuarterID:6, Data/bits>>, Conn, Flags} when is_integer(Flags) -> + {datagram, QuarterID * 4, Data} + after 5000 -> + ct:pal("~p", [process_info(self(), messages)]), + error({timeout, waiting_for_datagram}) + end. + +-endif. 
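%% The capsules quoted throughout this suite all follow the generic HTTP
%% capsule layout of [HTTP-DATAGRAM] (RFC 9297): a variable-length integer
%% capsule type, a variable-length integer length, then the capsule value.
%% As an illustrative sketch only (cow_capsule may offer dedicated helpers
%% for the flow control capsules; do_wt_max_streams_bidi/1 below is not
%% part of the suite), a WT_MAX_STREAMS capsule for bidirectional streams
%% could be built with the same cow_http3:encode_int/1 used elsewhere here:

do_wt_max_streams_bidi(MaxStreams) ->
    %% Maximum Streams is encoded as a variable-length integer.
    Value = cow_http3:encode_int(MaxStreams),
    iolist_to_binary([
        cow_http3:encode_int(16#190B4D3F), %% WT_MAX_STREAMS (bidi) capsule type.
        cow_http3:encode_int(iolist_size(Value)), %% Capsule length.
        Value
    ]).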
diff --git a/test/examples_SUITE.erl b/test/examples_SUITE.erl index 0a3b0eb..3d7c48b 100644 --- a/test/examples_SUITE.erl +++ b/test/examples_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2016-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -35,10 +35,10 @@ init_per_suite(Config) -> %% reuse the same build across all tests. Make = do_find_make_cmd(), CommonDir = config(priv_dir, Config), - ct:log("~s~n", [os:cmd("git clone --depth 1 https://github.com/ninenines/cowboy " + ct:log("~ts~n", [os:cmd("git clone --depth 1 https://github.com/ninenines/cowboy " ++ CommonDir ++ "cowboy")]), - ct:log("~s~n", [os:cmd(Make ++ " -C " ++ CommonDir ++ "cowboy distclean")]), - ct:log("~s~n", [os:cmd(Make ++ " -C " ++ CommonDir ++ "cowboy DEPS_DIR=" ++ CommonDir)]), + ct:log("~ts~n", [os:cmd(Make ++ " -C " ++ CommonDir ++ "cowboy distclean")]), + ct:log("~ts~n", [os:cmd(Make ++ " -C " ++ CommonDir ++ "cowboy DEPS_DIR=" ++ CommonDir)]), Config. end_per_suite(_) -> @@ -70,24 +70,24 @@ do_get_paths(Example0) -> do_compile_and_start(Example, Config) -> Make = do_find_make_cmd(), {Dir, Rel, _} = do_get_paths(Example), - ct:log("~s~n", [os:cmd(Make ++ " -C " ++ Dir ++ " distclean")]), + ct:log("~ts~n", [os:cmd(Make ++ " -C " ++ Dir ++ " distclean")]), %% We use a common build for Cowboy, Cowlib and Ranch to speed things up. CommonDir = config(priv_dir, Config), - ct:log("~s~n", [os:cmd("mkdir " ++ Dir ++ "/deps")]), - ct:log("~s~n", [os:cmd("ln -s " ++ CommonDir ++ "cowboy " ++ Dir ++ "/deps/cowboy")]), - ct:log("~s~n", [os:cmd("ln -s " ++ CommonDir ++ "cowlib " ++ Dir ++ "/deps/cowlib")]), - ct:log("~s~n", [os:cmd("ln -s " ++ CommonDir ++ "ranch " ++ Dir ++ "/deps/ranch")]), + ct:log("~ts~n", [os:cmd("mkdir " ++ Dir ++ "/deps")]), + ct:log("~ts~n", [os:cmd("ln -s " ++ CommonDir ++ "cowboy " ++ Dir ++ "/deps/cowboy")]), + ct:log("~ts~n", [os:cmd("ln -s " ++ CommonDir ++ "cowlib " ++ Dir ++ "/deps/cowlib")]), + ct:log("~ts~n", [os:cmd("ln -s " ++ CommonDir ++ "ranch " ++ Dir ++ "/deps/ranch")]), %% TERM=dumb disables relx coloring. - ct:log("~s~n", [os:cmd(Make ++ " -C " ++ Dir ++ " TERM=dumb")]), - ct:log("~s~n", [os:cmd(Rel ++ " stop")]), - ct:log("~s~n", [os:cmd(Rel ++ " start")]), + ct:log("~ts~n", [os:cmd(Make ++ " -C " ++ Dir ++ " TERM=dumb")]), + ct:log("~ts~n", [os:cmd(Rel ++ " stop")]), + ct:log("~ts~n", [os:cmd(Rel ++ " daemon")]), timer:sleep(2000), ok. do_stop(Example) -> {_, Rel, Log} = do_get_paths(Example), - ct:log("~s~n", [os:cmd(Rel ++ " stop")]), - ct:log("~s~n", [element(2, file:read_file(Log))]), + ct:log("~ts~n", [os:cmd(Rel ++ " stop")]), + ct:log("~ts~n", [element(2, file:read_file(Log))]), ok. %% Fetch a response. @@ -372,13 +372,16 @@ file_server(Config) -> do_file_server(Transport, Protocol, Config) -> %% Directory. {200, DirHeaders, <<"<!DOCTYPE html><html>", _/bits >>} = do_get(Transport, Protocol, "/", Config), - {_, <<"text/html">>} = lists:keyfind(<<"content-type">>, 1, DirHeaders), + {_, <<"text/html; charset=utf-8">>} = lists:keyfind(<<"content-type">>, 1, DirHeaders), _ = do_rest_get(Transport, Protocol, "/", <<"application/json">>, undefined, Config), %% Files. 
{200, _, _} = do_get(Transport, Protocol, "/small.mp4", Config), {200, _, _} = do_get(Transport, Protocol, "/small.ogv", Config), {200, _, _} = do_get(Transport, Protocol, "/test.txt", Config), {200, _, _} = do_get(Transport, Protocol, "/video.html", Config), + {200, _, _} = do_get(Transport, Protocol, + ["/", cow_uri:urlencode(<<"中文"/utf8>>), "/", cow_uri:urlencode(<<"中文.html"/utf8>>)], + Config), ok. %% Markdown middleware. diff --git a/test/h2spec_SUITE.erl b/test/h2spec_SUITE.erl index 08497e9..71a8a41 100644 --- a/test/h2spec_SUITE.erl +++ b/test/h2spec_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above diff --git a/test/handlers/compress_h.erl b/test/handlers/compress_h.erl index 27edbd3..658c834 100644 --- a/test/handlers/compress_h.erl +++ b/test/handlers/compress_h.erl @@ -19,6 +19,9 @@ init(Req0, State=reply) -> <<"content-encoding">> -> cowboy_req:reply(200, #{<<"content-encoding">> => <<"compress">>}, lists:duplicate(100000, $a), Req0); + <<"etag">> -> + cowboy_req:reply(200, #{<<"etag">> => <<"\"STRONK\"">>}, + lists:duplicate(100000, $a), Req0); <<"sendfile">> -> AppFile = code:where_is_file("cowboy.app"), Size = filelib:file_size(AppFile), @@ -34,6 +37,8 @@ init(Req0, State=stream_reply) -> stream_reply(#{}, Req0); <<"content-encoding">> -> stream_reply(#{<<"content-encoding">> => <<"compress">>}, Req0); + <<"etag">> -> + stream_reply(#{<<"etag">> => <<"\"STRONK\"">>}, Req0); <<"sendfile">> -> Data = lists:duplicate(10000, $a), AppFile = code:where_is_file("cowboy.app"), diff --git a/test/handlers/content_types_provided_h.erl b/test/handlers/content_types_provided_h.erl index 5220c19..397026b 100644 --- a/test/handlers/content_types_provided_h.erl +++ b/test/handlers/content_types_provided_h.erl @@ -11,9 +11,14 @@ init(Req, Opts) -> {cowboy_rest, Req, Opts}. +content_types_provided(Req=#{qs := <<"invalid-type">>}, State) -> + ct_helper:ignore(cowboy_rest, normalize_content_types, 2), + {[{{'*', '*', '*'}, get_text_plain}], Req, State}; content_types_provided(Req=#{qs := <<"wildcard-param">>}, State) -> {[{{<<"text">>, <<"plain">>, '*'}, get_text_plain}], Req, State}. +get_text_plain(Req=#{qs := <<"invalid-type">>}, State) -> + {<<"invalid-type">>, Req, State}; get_text_plain(Req=#{qs := <<"wildcard-param">>}, State) -> {_, _, Param} = maps:get(media_type, Req), Body = if diff --git a/test/handlers/crash_h.erl b/test/handlers/crash_h.erl index b687aba..57d4d85 100644 --- a/test/handlers/crash_h.erl +++ b/test/handlers/crash_h.erl @@ -7,6 +7,9 @@ -export([init/2]). -spec init(_, _) -> no_return(). +init(_, external_exit) -> + ct_helper:ignore(?MODULE, init, 2), + exit(self(), ct_helper_ignore); init(_, no_reply) -> ct_helper:ignore(?MODULE, init, 2), error(crash); diff --git a/test/handlers/create_resource_h.erl b/test/handlers/create_resource_h.erl new file mode 100644 index 0000000..f82e610 --- /dev/null +++ b/test/handlers/create_resource_h.erl @@ -0,0 +1,28 @@ +-module(create_resource_h). + +-export([init/2]). +-export([allowed_methods/2]). +-export([resource_exists/2]). +-export([content_types_accepted/2]). +-export([from_text/2]). + +init(Req, Opts) -> + {cowboy_rest, Req, Opts}. + +allowed_methods(Req, State) -> + {[<<"POST">>], Req, State}. + +resource_exists(Req, State) -> + {true, Req, State}. 
+ +content_types_accepted(Req, State) -> + {[{{<<"application">>, <<"text">>, []}, from_text}], Req, State}. + +from_text(Req=#{qs := Qs}, State) -> + NewURI = [cowboy_req:uri(Req), "/foo"], + case Qs of + <<"created">> -> + {{created, NewURI}, Req, State}; + <<"see_other">> -> + {{see_other, NewURI}, Req, State} + end. diff --git a/test/handlers/decompress_h.erl b/test/handlers/decompress_h.erl new file mode 100644 index 0000000..deb6de0 --- /dev/null +++ b/test/handlers/decompress_h.erl @@ -0,0 +1,84 @@ +%% This module echoes a request body of to test +%% the cowboy_decompress_h stream handler. + +-module(decompress_h). + +-export([init/2]). + +init(Req0, State=echo) -> + case cowboy_req:binding(what, Req0) of + <<"decompress_disable">> -> + cowboy_req:cast({set_options, #{decompress_enabled => false}}, Req0); + <<"decompress_ratio_limit">> -> + cowboy_req:cast({set_options, #{decompress_ratio_limit => 0.5}}, Req0); + <<"normal">> -> ok + end, + {ok, Body, Req1} = read_body(Req0), + Req = cowboy_req:reply(200, #{}, Body, Req1), + {ok, Req, State}; +init(Req0, State=test) -> + Req = test(Req0, cowboy_req:binding(what, Req0)), + {ok, Req, State}. + +test(Req, <<"content-encoding">>) -> + cowboy_req:reply(200, #{}, + cowboy_req:header(<<"content-encoding">>, Req, <<"undefined">>), + Req); +test(Req, <<"content-decoded">>) -> + cowboy_req:reply(200, #{}, + io_lib:format("~0p", [maps:get(content_decoded, Req, undefined)]), + Req); +test(Req0, <<"disable-in-the-middle">>) -> + {Status, Data, Req1} = cowboy_req:read_body(Req0, #{length => 1000}), + cowboy_req:cast({set_options, #{decompress_enabled => false}}, Req1), + {ok, Body, Req} = do_read_body(Status, Req1, Data), + cowboy_req:reply(200, #{}, Body, Req); +test(Req0, <<"enable-in-the-middle">>) -> + {Status, Data, Req1} = cowboy_req:read_body(Req0, #{length => 1000}), + cowboy_req:cast({set_options, #{decompress_enabled => true}}, Req1), + {ok, Body, Req} = do_read_body(Status, Req1, Data), + cowboy_req:reply(200, #{}, Body, Req); +test(Req0, <<"header-command">>) -> + {ok, Body, Req1} = read_body(Req0), + Req = cowboy_req:stream_reply(200, #{}, Req1), + cowboy_req:stream_body(Body, fin, Req); +test(Req0, <<"accept-identity">>) -> + {ok, Body, Req} = read_body(Req0), + cowboy_req:reply(200, + #{<<"accept-encoding">> => <<"identity">>}, + Body, Req); +test(Req0, <<"invalid-header">>) -> + {ok, Body, Req} = read_body(Req0), + cowboy_req:reply(200, + #{<<"accept-encoding">> => <<";">>}, + Body, Req); +test(Req0, <<"reject-explicit-header">>) -> + {ok, Body, Req} = read_body(Req0), + cowboy_req:reply(200, + #{<<"accept-encoding">> => <<"identity, gzip;q=0">>}, + Body, Req); +test(Req0, <<"reject-implicit-header">>) -> + {ok, Body, Req} = read_body(Req0), + cowboy_req:reply(200, + #{<<"accept-encoding">> => <<"identity, *;q=0">>}, + Body, Req); +test(Req0, <<"accept-explicit-header">>) -> + {ok, Body, Req} = read_body(Req0), + cowboy_req:reply(200, + #{<<"accept-encoding">> => <<"identity, gzip;q=0.5">>}, + Body, Req); +test(Req0, <<"accept-implicit-header">>) -> + {ok, Body, Req} = read_body(Req0), + cowboy_req:reply(200, + #{<<"accept-encoding">> => <<"identity, *;q=0.5">>}, + Body, Req). + +read_body(Req0) -> + {Status, Data, Req} = cowboy_req:read_body(Req0, #{length => 1000}), + do_read_body(Status, Req, Data). + +do_read_body(more, Req0, Acc) -> + {Status, Data, Req} = cowboy_req:read_body(Req0), + do_read_body(Status, Req, << Acc/binary, Data/binary >>); +do_read_body(ok, Req, Acc) -> + {ok, Acc, Req}. 
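%% For handlers like the one above to see inflated request bodies, the
%% listener they run under must have the decompress stream handler in
%% front of the default one. A minimal sketch, assuming the usual cowboy
%% listener setup (listener name, port and dispatch are illustrative):

do_start_decompress_listener(Dispatch) ->
    cowboy:start_clear(decompress_example, [{port, 8080}], #{
        env => #{dispatch => Dispatch},
        %% cowboy_decompress_h runs before cowboy_stream_h so that
        %% gzipped request bodies are inflated before handlers read them.
        stream_handlers => [cowboy_decompress_h, cowboy_stream_h]
    }).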
diff --git a/test/handlers/echo_h.erl b/test/handlers/echo_h.erl index 1b672d1..d04d531 100644 --- a/test/handlers/echo_h.erl +++ b/test/handlers/echo_h.erl @@ -25,6 +25,8 @@ echo(<<"read_body">>, Req0, Opts) -> timer:sleep(500), cowboy_req:read_body(Req0); <<"/full", _/bits>> -> read_body(Req0, <<>>); + <<"/auto-sync", _/bits>> -> read_body_auto_sync(Req0, <<>>); + <<"/auto-async", _/bits>> -> read_body_auto_async(Req0, <<>>); <<"/length", _/bits>> -> {_, _, Req1} = read_body(Req0, <<>>), Length = cowboy_req:body_length(Req1), @@ -84,6 +86,7 @@ echo(<<"match">>, Req, Opts) -> Fields = [binary_to_atom(F, latin1) || F <- Fields0], Value = case Type of <<"qs">> -> cowboy_req:match_qs(Fields, Req); + <<"qs_with_constraints">> -> cowboy_req:match_qs([{id, integer}], Req); <<"cookies">> -> cowboy_req:match_cookies(Fields, Req); <<"body_qs">> -> %% Note that the Req should not be discarded but for the @@ -122,6 +125,25 @@ read_body(Req0, Acc) -> {more, Data, Req} -> read_body(Req, << Acc/binary, Data/binary >>) end. +read_body_auto_sync(Req0, Acc) -> + Opts = #{length => auto, period => infinity}, + case cowboy_req:read_body(Req0, Opts) of + {ok, Data, Req} -> {ok, << Acc/binary, Data/binary >>, Req}; + {more, Data, Req} -> read_body_auto_sync(Req, << Acc/binary, Data/binary >>) + end. + +read_body_auto_async(Req, Acc) -> + read_body_auto_async(Req, make_ref(), Acc). + +read_body_auto_async(Req, ReadBodyRef, Acc) -> + cowboy_req:cast({read_body, self(), ReadBodyRef, auto, infinity}, Req), + receive + {request_body, ReadBodyRef, nofin, Data} -> + read_body_auto_async(Req, ReadBodyRef, <<Acc/binary, Data/binary>>); + {request_body, ReadBodyRef, fin, _, Data} -> + {ok, <<Acc/binary, Data/binary>>, Req} + end. + value_to_iodata(V) when is_integer(V) -> integer_to_binary(V); value_to_iodata(V) when is_atom(V) -> atom_to_binary(V, latin1); value_to_iodata(V) when is_list(V); is_tuple(V); is_map(V) -> io_lib:format("~999999p", [V]); diff --git a/test/handlers/generate_etag_h.erl b/test/handlers/generate_etag_h.erl index 97ee82b..b9e1302 100644 --- a/test/handlers/generate_etag_h.erl +++ b/test/handlers/generate_etag_h.erl @@ -34,6 +34,9 @@ generate_etag(Req=#{qs := <<"binary-weak-unquoted">>}, State) -> generate_etag(Req=#{qs := <<"binary-strong-unquoted">>}, State) -> ct_helper_error_h:ignore(cow_http_hd, parse_etag, 1), {<<"etag-header-value">>, Req, State}; +%% Returning 'undefined' to indicate no etag. +generate_etag(Req=#{qs := <<"undefined">>}, State) -> + {undefined, Req, State}; %% Simulate the callback being missing in other cases. generate_etag(#{qs := <<"missing">>}, _) -> no_call. diff --git a/test/handlers/loop_handler_endless_h.erl b/test/handlers/loop_handler_endless_h.erl new file mode 100644 index 0000000..d8c8ab5 --- /dev/null +++ b/test/handlers/loop_handler_endless_h.erl @@ -0,0 +1,25 @@ +%% This module implements a loop handler that streams endless data. + +-module(loop_handler_endless_h). + +-export([init/2]). +-export([info/3]). + +init(Req0, #{delay := Delay} = Opts) -> + case cowboy_req:header(<<"x-test-pid">>, Req0) of + BinPid when is_binary(BinPid) -> + Pid = list_to_pid(binary_to_list(BinPid)), + Pid ! {Pid, self(), init}, + ok; + _ -> + ok + end, + erlang:send_after(Delay, self(), timeout), + Req = cowboy_req:stream_reply(200, Req0), + {cowboy_loop, Req, Opts}. + +info(timeout, Req, State) -> + cowboy_req:stream_body(<<0:10000/unit:8>>, nofin, Req), + %% Equivalent to a 0 timeout. + self() ! timeout, + {ok, Req, State}. 
diff --git a/test/handlers/loop_handler_timeout_hibernate_h.erl b/test/handlers/loop_handler_timeout_hibernate_h.erl new file mode 100644 index 0000000..0485208 --- /dev/null +++ b/test/handlers/loop_handler_timeout_hibernate_h.erl @@ -0,0 +1,30 @@ +%% This module implements a loop handler that first +%% sets a timeout, then hibernates, then ensures +%% that the timeout initially set no longer triggers. +%% If everything goes fine a 200 is returned. If the +%% timeout triggers again a 299 is returned. + +-module(loop_handler_timeout_hibernate_h). + +-export([init/2]). +-export([info/3]). +-export([terminate/3]). + +init(Req, _) -> + self() ! message1, + {cowboy_loop, Req, undefined, 100}. + +info(message1, Req, State) -> + erlang:send_after(200, self(), message2), + {ok, Req, State, hibernate}; +info(message2, Req, State) -> + erlang:send_after(200, self(), message3), + %% Don't set a timeout now. + {ok, Req, State}; +info(message3, Req, State) -> + {stop, cowboy_req:reply(200, Req), State}; +info(timeout, Req, State) -> + {stop, cowboy_req:reply(<<"299 OK!">>, Req), State}. + +terminate(stop, _, _) -> + ok. diff --git a/test/handlers/loop_handler_timeout_info_h.erl b/test/handlers/loop_handler_timeout_info_h.erl new file mode 100644 index 0000000..7a1ccba --- /dev/null +++ b/test/handlers/loop_handler_timeout_info_h.erl @@ -0,0 +1,23 @@ +%% This module implements a loop handler that changes +%% the timeout value to 100ms after the first message, +%% then sends itself another message after 500ms. +%% It is expected to time out, that is, reply a 299. + +-module(loop_handler_timeout_info_h). + +-export([init/2]). +-export([info/3]). +-export([terminate/3]). + +init(Req, _) -> + self() ! message, + {cowboy_loop, Req, undefined}. + +info(message, Req, State) -> + erlang:send_after(500, self(), message), + {ok, Req, State, 100}; +info(timeout, Req, State) -> + {stop, cowboy_req:reply(<<"299 OK!">>, Req), State}. + +terminate(stop, _, _) -> + ok. diff --git a/test/handlers/loop_handler_timeout_init_h.erl b/test/handlers/loop_handler_timeout_init_h.erl new file mode 100644 index 0000000..7908fda --- /dev/null +++ b/test/handlers/loop_handler_timeout_init_h.erl @@ -0,0 +1,23 @@ +%% This module implements a loop handler that reads +%% the request query for a timeout value, then sends +%% itself a message after 500ms. It replies a 200 when +%% the message arrives before the timeout and a 299 otherwise. + +-module(loop_handler_timeout_init_h). + +-export([init/2]). +-export([info/3]). +-export([terminate/3]). + +init(Req, _) -> + #{timeout := Timeout} = cowboy_req:match_qs([{timeout, int}], Req), + erlang:send_after(500, self(), message), + {cowboy_loop, Req, undefined, Timeout}. + +info(message, Req, State) -> + {stop, cowboy_req:reply(200, Req), State}; +info(timeout, Req, State) -> + {stop, cowboy_req:reply(<<"299 OK!">>, Req), State}. + +terminate(stop, _, _) -> + ok. diff --git a/test/handlers/read_body_h.erl b/test/handlers/read_body_h.erl new file mode 100644 index 0000000..a0de3b3 --- /dev/null +++ b/test/handlers/read_body_h.erl @@ -0,0 +1,15 @@ +%% This module reads the request body fully and sends a 200 response. + +-module(read_body_h). + +-export([init/2]). + +init(Req0, Opts) -> + {ok, Req} = read_body(Req0), + {ok, cowboy_req:reply(200, #{}, Req), Opts}. + +read_body(Req0) -> + case cowboy_req:read_body(Req0) of + {ok, _, Req} -> {ok, Req}; + {more, _, Req} -> read_body(Req) + end.
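%% These loop handlers are driven from their test suite; as a rough usage
%% sketch (the path and timeout value are illustrative, not the suite's
%% actual route), a request like the following should get the 200 reply
%% because the handler's 500ms message fires before the 1000ms timeout:

do_loop_timeout_init_request(ConnPid) ->
    StreamRef = gun:get(ConnPid, "/loop_handler_timeout_init?timeout=1000"),
    {response, fin, 200, _Headers} = gun:await(ConnPid, StreamRef),
    ok.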
diff --git a/test/handlers/resp_h.erl b/test/handlers/resp_h.erl index 8031d0e..d1c46e0 100644 --- a/test/handlers/resp_h.erl +++ b/test/handlers/resp_h.erl @@ -30,6 +30,10 @@ do(<<"set_resp_cookie4">>, Req0, Opts) -> do(<<"set_resp_header">>, Req0, Opts) -> Req = cowboy_req:set_resp_header(<<"content-type">>, <<"text/plain">>, Req0), {ok, cowboy_req:reply(200, #{}, "OK", Req), Opts}; +do(<<"set_resp_header_cookie">>, Req0, Opts) -> + ct_helper:ignore(cowboy_req, set_resp_header, 3), + Req = cowboy_req:set_resp_header(<<"set-cookie">>, <<"name=value">>, Req0), + {ok, cowboy_req:reply(200, #{}, "OK", Req), Opts}; do(<<"set_resp_header_server">>, Req0, Opts) -> Req = cowboy_req:set_resp_header(<<"server">>, <<"nginx">>, Req0), {ok, cowboy_req:reply(200, #{}, "OK", Req), Opts}; @@ -39,6 +43,27 @@ do(<<"set_resp_headers">>, Req0, Opts) -> <<"content-encoding">> => <<"compress">> }, Req0), {ok, cowboy_req:reply(200, #{}, "OK", Req), Opts}; +do(<<"set_resp_headers_list">>, Req0, Opts) -> + Req = cowboy_req:set_resp_headers([ + {<<"content-type">>, <<"text/plain">>}, + {<<"test-header">>, <<"one">>}, + {<<"content-encoding">>, <<"compress">>}, + {<<"test-header">>, <<"two">>} + ], Req0), + {ok, cowboy_req:reply(200, #{}, "OK", Req), Opts}; +do(<<"set_resp_headers_cookie">>, Req0, Opts) -> + ct_helper:ignore(cowboy_req, set_resp_headers, 2), + Req = cowboy_req:set_resp_headers(#{ + <<"set-cookie">> => <<"name=value">> + }, Req0), + {ok, cowboy_req:reply(200, #{}, "OK", Req), Opts}; +do(<<"set_resp_headers_list_cookie">>, Req0, Opts) -> + ct_helper:ignore(cowboy_req, set_resp_headers_list, 3), + Req = cowboy_req:set_resp_headers([ + {<<"set-cookie">>, <<"name=value">>}, + {<<"set-cookie">>, <<"name2=value2">>} + ], Req0), + {ok, cowboy_req:reply(200, #{}, "OK", Req), Opts}; do(<<"set_resp_headers_http11">>, Req0, Opts) -> Req = cowboy_req:set_resp_headers(#{ <<"connection">> => <<"custom-header, close">>, @@ -130,6 +155,10 @@ do(<<"inform2">>, Req0, Opts) -> <<"twice">> -> cowboy_req:inform(102, Req0), cowboy_req:inform(102, Req0); + <<"after_reply">> -> + ct_helper:ignore(cowboy_req, inform, 3), + Req1 = cowboy_req:reply(200, Req0), + cowboy_req:inform(102, Req1); Status -> cowboy_req:inform(binary_to_integer(Status), Req0) end, @@ -143,9 +172,16 @@ do(<<"inform3">>, Req0, Opts) -> <<"error">> -> ct_helper:ignore(cowboy_req, inform, 3), cowboy_req:inform(ok, Headers, Req0); + <<"set_cookie">> -> + ct_helper:ignore(cowboy_req, inform, 3), + cowboy_req:inform(102, #{<<"set-cookie">> => <<"name=value">>}, Req0); <<"twice">> -> cowboy_req:inform(102, Headers, Req0), cowboy_req:inform(102, Headers, Req0); + <<"after_reply">> -> + ct_helper:ignore(cowboy_req, inform, 3), + Req1 = cowboy_req:reply(200, Req0), + cowboy_req:inform(102, Headers, Req1); Status -> cowboy_req:inform(binary_to_integer(Status), Headers, Req0) end, @@ -161,6 +197,7 @@ do(<<"reply2">>, Req0, Opts) -> <<"twice">> -> ct_helper:ignore(cowboy_req, reply, 4), Req1 = cowboy_req:reply(200, Req0), + timer:sleep(100), cowboy_req:reply(200, Req1); Status -> cowboy_req:reply(binary_to_integer(Status), Req0) @@ -171,6 +208,9 @@ do(<<"reply3">>, Req0, Opts) -> <<"error">> -> ct_helper:ignore(cowboy_req, reply, 4), cowboy_req:reply(200, ok, Req0); + <<"set_cookie">> -> + ct_helper:ignore(cowboy_req, reply, 4), + cowboy_req:reply(200, #{<<"set-cookie">> => <<"name=value">>}, Req0); Status -> cowboy_req:reply(binary_to_integer(Status), #{<<"content-type">> => <<"text/plain">>}, Req0) @@ -181,11 +221,14 @@ do(<<"reply4">>, Req0, Opts) -> 
<<"error">> -> ct_helper:ignore(erlang, iolist_size, 1), cowboy_req:reply(200, #{}, ok, Req0); - <<"204body">> -> + <<"set_cookie">> -> ct_helper:ignore(cowboy_req, reply, 4), + cowboy_req:reply(200, #{<<"set-cookie">> => <<"name=value">>}, <<"OK">>, Req0); + <<"204body">> -> + ct_helper:ignore(cowboy_req, do_reply_ensure_no_body, 4), cowboy_req:reply(204, #{}, <<"OK">>, Req0); <<"304body">> -> - ct_helper:ignore(cowboy_req, reply, 4), + ct_helper:ignore(cowboy_req, do_reply_ensure_no_body, 4), cowboy_req:reply(304, #{}, <<"OK">>, Req0); Status -> cowboy_req:reply(binary_to_integer(Status), #{}, <<"OK">>, Req0) @@ -215,6 +258,14 @@ do(<<"stream_reply2">>, Req0, Opts) -> Req = cowboy_req:stream_reply(304, Req0), stream_body(Req), {ok, Req, Opts}; + <<"twice">> -> + ct_helper:ignore(cowboy_req, stream_reply, 3), + Req1 = cowboy_req:stream_reply(200, Req0), + timer:sleep(100), + %% We will crash here so the body shouldn't be sent. + Req = cowboy_req:stream_reply(200, Req1), + stream_body(Req), + {ok, Req, Opts}; Status -> Req = cowboy_req:stream_reply(binary_to_integer(Status), Req0), stream_body(Req), @@ -225,6 +276,9 @@ do(<<"stream_reply3">>, Req0, Opts) -> <<"error">> -> ct_helper:ignore(cowboy_req, stream_reply, 3), cowboy_req:stream_reply(200, ok, Req0); + <<"set_cookie">> -> + ct_helper:ignore(cowboy_req, stream_reply, 3), + cowboy_req:stream_reply(200, #{<<"set-cookie">> => <<"name=value">>}, Req0); Status -> cowboy_req:stream_reply(binary_to_integer(Status), #{<<"content-type">> => <<"text/plain">>}, Req0) @@ -380,6 +434,16 @@ do(<<"stream_trailers">>, Req0, Opts) -> <<"grpc-status">> => <<"0">> }, Req), {ok, Req, Opts}; + <<"set_cookie">> -> + ct_helper:ignore(cowboy_req, stream_trailers, 2), + Req = cowboy_req:stream_reply(200, #{ + <<"trailer">> => <<"set-cookie">> + }, Req0), + cowboy_req:stream_body(<<"Hello world!">>, nofin, Req), + cowboy_req:stream_trailers(#{ + <<"set-cookie">> => <<"name=value">> + }, Req), + {ok, Req, Opts}; _ -> Req = cowboy_req:stream_reply(200, #{ <<"trailer">> => <<"grpc-status">> @@ -403,6 +467,11 @@ do(<<"push">>, Req, Opts) -> <<"qs">> -> cowboy_req:push("/static/style.css", #{<<"accept">> => <<"text/css">>}, Req, #{qs => <<"server=cowboy&version=2.0">>}); + <<"after_reply">> -> + ct_helper:ignore(cowboy_req, push, 4), + Req1 = cowboy_req:reply(200, Req), + %% We will crash here so no need to worry about propagating Req1. + cowboy_req:push("/static/style.css", #{<<"accept">> => <<"text/css">>}, Req1); _ -> cowboy_req:push("/static/style.css", #{<<"accept">> => <<"text/css">>}, Req), %% The text/plain mime is not defined by default, so a 406 will be returned. 
diff --git a/test/handlers/stream_handler_h.erl b/test/handlers/stream_handler_h.erl index 370d15a..7a1e5ec 100644 --- a/test/handlers/stream_handler_h.erl +++ b/test/handlers/stream_handler_h.erl @@ -44,16 +44,16 @@ init_commands(_, _, #state{test=set_options_ignore_unknown}) -> ]; init_commands(_, _, State=#state{test=shutdown_on_stream_stop}) -> Spawn = init_process(false, State), - [{headers, 200, #{}}, {spawn, Spawn, 5000}, stop]; + [{spawn, Spawn, 5000}, {headers, 200, #{}}, stop]; init_commands(_, _, State=#state{test=shutdown_on_socket_close}) -> Spawn = init_process(false, State), - [{headers, 200, #{}}, {spawn, Spawn, 5000}]; + [{spawn, Spawn, 5000}, {headers, 200, #{}}]; init_commands(_, _, State=#state{test=shutdown_timeout_on_stream_stop}) -> Spawn = init_process(true, State), - [{headers, 200, #{}}, {spawn, Spawn, 2000}, stop]; + [{spawn, Spawn, 2000}, {headers, 200, #{}}, stop]; init_commands(_, _, State=#state{test=shutdown_timeout_on_socket_close}) -> Spawn = init_process(true, State), - [{headers, 200, #{}}, {spawn, Spawn, 2000}]; + [{spawn, Spawn, 2000}, {headers, 200, #{}}]; init_commands(_, _, State=#state{test=switch_protocol_after_headers}) -> [{headers, 200, #{}}, {switch_protocol, #{}, ?MODULE, State}]; init_commands(_, _, State=#state{test=switch_protocol_after_headers_data}) -> diff --git a/test/handlers/stream_hello_h.erl b/test/handlers/stream_hello_h.erl new file mode 100644 index 0000000..e67e220 --- /dev/null +++ b/test/handlers/stream_hello_h.erl @@ -0,0 +1,15 @@ +%% This module is the fastest way of producing a Hello world! + +-module(stream_hello_h). + +-export([init/3]). +-export([terminate/3]). + +init(_, _, State) -> + {[ + {response, 200, #{<<"content-length">> => <<"12">>}, <<"Hello world!">>}, + stop + ], State}. + +terminate(_, _, _) -> + ok. diff --git a/test/handlers/streamed_result_h.erl b/test/handlers/streamed_result_h.erl new file mode 100644 index 0000000..ea6f492 --- /dev/null +++ b/test/handlers/streamed_result_h.erl @@ -0,0 +1,20 @@ +-module(streamed_result_h). + +-export([init/2]). + +init(Req, Opts) -> + N = list_to_integer(binary_to_list(cowboy_req:binding(n, Req))), + Interval = list_to_integer(binary_to_list(cowboy_req:binding(interval, Req))), + chunked(N, Interval, Req, Opts). + +chunked(N, Interval, Req0, Opts) -> + Req = cowboy_req:stream_reply(200, Req0), + {ok, loop(N, Interval, Req), Opts}. + +loop(0, _Interval, Req) -> + ok = cowboy_req:stream_body("Finished!\n", fin, Req), + Req; +loop(N, Interval, Req) -> + ok = cowboy_req:stream_body(iolist_to_binary([integer_to_list(N), <<"\n">>]), nofin, Req), + timer:sleep(Interval), + loop(N-1, Interval, Req). diff --git a/test/handlers/ws_ignore.erl b/test/handlers/ws_ignore.erl new file mode 100644 index 0000000..9fe3322 --- /dev/null +++ b/test/handlers/ws_ignore.erl @@ -0,0 +1,20 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(ws_ignore). + +-export([init/2]). +-export([websocket_handle/2]). +-export([websocket_info/2]). + +init(Req, _) -> + {cowboy_websocket, Req, undefined, #{ + compress => true + }}. + +websocket_handle({text, <<"CHECK">>}, State) -> + {[{text, <<"CHECK">>}], State}; +websocket_handle(_Frame, State) -> + {[], State}. + +websocket_info(_Info, State) -> + {[], State}. 
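%% streamed_result_h above streams N numbered chunks, Interval ms apart,
%% before a final "Finished!\n". A rough client-side sketch with gun,
%% assuming a connection to a listener that routes
%% "/streamed_result/:n/:interval" to it (as the HTTP suites below do):

do_streamed_result(ConnPid) ->
    StreamRef = gun:get(ConnPid, "/streamed_result/3/100"),
    {response, nofin, 200, _Headers} = gun:await(ConnPid, StreamRef),
    {ok, Body} = gun:await_body(ConnPid, StreamRef),
    %% Three countdown lines followed by the closing marker.
    <<"3\n2\n1\nFinished!\n">> = iolist_to_binary(Body),
    ok.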
diff --git a/test/handlers/ws_init_h.erl b/test/handlers/ws_init_h.erl index db5307b..bbe9ef9 100644 --- a/test/handlers/ws_init_h.erl +++ b/test/handlers/ws_init_h.erl @@ -36,7 +36,10 @@ do_websocket_init(State=reply_many_hibernate) -> do_websocket_init(State=reply_many_close) -> {[{text, "Hello"}, close], State}; do_websocket_init(State=reply_many_close_hibernate) -> - {[{text, "Hello"}, close], State, hibernate}. + {[{text, "Hello"}, close], State, hibernate}; +do_websocket_init(State=reply_trap_exit) -> + Text = "trap_exit: " ++ atom_to_list(element(2, process_info(self(), trap_exit))), + {[{text, Text}, close], State, hibernate}. websocket_handle(_, State) -> {[], State}. diff --git a/test/handlers/ws_ping_h.erl b/test/handlers/ws_ping_h.erl new file mode 100644 index 0000000..a5848fe --- /dev/null +++ b/test/handlers/ws_ping_h.erl @@ -0,0 +1,23 @@ +%% This module sends an empty ping to the client and +%% waits for a pong before sending a text frame. It +%% is used to confirm server-initiated pings work. + +-module(ws_ping_h). +-behavior(cowboy_websocket). + +-export([init/2]). +-export([websocket_init/1]). +-export([websocket_handle/2]). +-export([websocket_info/2]). + +init(Req, _) -> + {cowboy_websocket, Req, undefined}. + +websocket_init(State) -> + {[{ping, <<>>}], State}. + +websocket_handle(pong, State) -> + {[{text, <<"OK!!">>}], State}. + +websocket_info(_, State) -> + {[], State}. diff --git a/test/handlers/ws_set_options_commands_h.erl b/test/handlers/ws_set_options_commands_h.erl index 88d4e72..1ab0af4 100644 --- a/test/handlers/ws_set_options_commands_h.erl +++ b/test/handlers/ws_set_options_commands_h.erl @@ -11,10 +11,21 @@ init(Req, RunOrHibernate) -> {cowboy_websocket, Req, RunOrHibernate, #{idle_timeout => infinity}}. -websocket_handle(Frame={text, <<"idle_timeout_short">>}, State=run) -> - {[{set_options, #{idle_timeout => 500}}, Frame], State}; -websocket_handle(Frame={text, <<"idle_timeout_short">>}, State=hibernate) -> - {[{set_options, #{idle_timeout => 500}}, Frame], State, hibernate}. +%% Set the idle_timeout option dynamically. +websocket_handle({text, <<"idle_timeout_short">>}, State=run) -> + {[{set_options, #{idle_timeout => 500}}], State}; +websocket_handle({text, <<"idle_timeout_short">>}, State=hibernate) -> + {[{set_options, #{idle_timeout => 500}}], State, hibernate}; +%% Set the max_frame_size option dynamically. +websocket_handle({text, <<"max_frame_size_small">>}, State=run) -> + {[{set_options, #{max_frame_size => 1000}}], State}; +websocket_handle({text, <<"max_frame_size_small">>}, State=hibernate) -> + {[{set_options, #{max_frame_size => 1000}}], State, hibernate}; +%% We just echo binary frames. +websocket_handle(Frame={binary, _}, State=run) -> + {[Frame], State}; +websocket_handle(Frame={binary, _}, State=hibernate) -> + {[Frame], State, hibernate}. websocket_info(_Info, State) -> {[], State}. diff --git a/test/handlers/wt_echo_h.erl b/test/handlers/wt_echo_h.erl new file mode 100644 index 0000000..5198565 --- /dev/null +++ b/test/handlers/wt_echo_h.erl @@ -0,0 +1,103 @@ +%% This module echoes client events back, +%% including creating new streams. + +-module(wt_echo_h). +-behavior(cowboy_webtransport). + +-export([init/2]). +-export([webtransport_handle/2]). +-export([webtransport_info/2]). +-export([terminate/3]). + +%% -define(DEBUG, 1). +-ifdef(DEBUG). +-define(LOG(Fmt, Args), ct:pal(Fmt, Args)). +-else. +-define(LOG(Fmt, Args), _ = Fmt, _ = Args, ok). +-endif. 
+ +init(Req0, _) -> + ?LOG("WT init ~p~n", [Req0]), + Req = case cowboy_req:parse_header(<<"wt-available-protocols">>, Req0) of + undefined -> + Req0; + [Protocol|_] -> + cowboy_req:set_resp_header(<<"wt-protocol">>, cow_http_hd:wt_protocol(Protocol), Req0) + end, + {cowboy_webtransport, Req, #{}}. + +webtransport_handle(Event = {stream_open, StreamID, bidi}, Streams) -> + ?LOG("WT handle ~p~n", [Event]), + {[], Streams#{StreamID => bidi}}; +webtransport_handle(Event = {stream_open, StreamID, unidi}, Streams) -> + ?LOG("WT handle ~p~n", [Event]), + OpenStreamRef = make_ref(), + {[{open_stream, OpenStreamRef, unidi, <<>>}], Streams#{ + StreamID => {unidi_remote, OpenStreamRef}, + OpenStreamRef => {unidi_local, StreamID}}}; +webtransport_handle(Event = {opened_stream_id, OpenStreamRef, OpenStreamID}, Streams) -> + ?LOG("WT handle ~p~n", [Event]), + case Streams of + #{OpenStreamRef := bidi} -> + {[], maps:remove(OpenStreamRef, Streams#{ + OpenStreamID => bidi + })}; + #{OpenStreamRef := {unidi_local, RemoteStreamID}} -> + #{RemoteStreamID := {unidi_remote, OpenStreamRef}} = Streams, + {[], maps:remove(OpenStreamRef, Streams#{ + RemoteStreamID => {unidi_remote, OpenStreamID}, + OpenStreamID => {unidi_local, RemoteStreamID} + })} + end; +webtransport_handle(Event = {stream_data, StreamID, _IsFin, <<"TEST:", Test/bits>>}, Streams) -> + ?LOG("WT handle ~p~n", [Event]), + case Test of + <<"open_bidi">> -> + OpenStreamRef = make_ref(), + {[{open_stream, OpenStreamRef, bidi, <<>>}], + Streams#{OpenStreamRef => bidi}}; + <<"initiate_close">> -> + {[initiate_close], Streams}; + <<"close">> -> + {[close], Streams}; + <<"close_app_code">> -> + {[{close, 1234567890}], Streams}; + <<"close_app_code_msg">> -> + {[{close, 1234567890, <<"onetwothreefourfivesixseveneightnineten">>}], Streams}; + <<"event_pid:", EventPidBin/bits>> -> + {[{send, StreamID, nofin, <<"event_pid_received">>}], + Streams#{event_pid => binary_to_term(EventPidBin)}} + end; +webtransport_handle(Event = {stream_data, StreamID, IsFin, Data}, Streams) -> + ?LOG("WT handle ~p~n", [Event]), + case Streams of + #{StreamID := bidi} -> + {[{send, StreamID, IsFin, Data}], Streams}; + #{StreamID := {unidi_remote, Ref}} when is_reference(Ref) -> + %% The stream isn't ready. We try again later. + erlang:send_after(100, self(), {try_again, Event}), + {[], Streams}; + #{StreamID := {unidi_remote, LocalStreamID}} -> + {[{send, LocalStreamID, IsFin, Data}], Streams} + end; +webtransport_handle(Event = {datagram, Data}, Streams) -> + ?LOG("WT handle ~p~n", [Event]), + {[{send, datagram, Data}], Streams}; +webtransport_handle(Event = close_initiated, Streams) -> + ?LOG("WT handle ~p~n", [Event]), + {[{send, datagram, <<"TEST:close_initiated">>}], Streams}; +webtransport_handle(Event, Streams) -> + ?LOG("WT handle ignore ~p~n", [Event]), + {[], Streams}. + +webtransport_info({try_again, Event}, Streams) -> + ?LOG("WT try_again ~p", [Event]), + webtransport_handle(Event, Streams). + +terminate(Reason, Req, State=#{event_pid := EventPid}) -> + ?LOG("WT terminate ~0p~n~0p~n~0p", [Reason, Req, State]), + EventPid ! {'$wt_echo_h', terminate, Reason, Req, State}, + ok; +terminate(Reason, Req, State) -> + ?LOG("WT terminate ~0p~n~0p~n~0p", [Reason, Req, State]), + ok. 
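%% For contrast with the full echo handler above, a bare-bones
%% cowboy_webtransport handler that only echoes datagrams could look like
%% this sketch (the module name is illustrative; the callbacks and command
%% tuples mirror the ones wt_echo_h already uses):

-module(wt_datagram_echo_h).
-behavior(cowboy_webtransport).

-export([init/2]).
-export([webtransport_handle/2]).
-export([webtransport_info/2]).

init(Req, State) ->
    %% Accept the WebTransport session.
    {cowboy_webtransport, Req, State}.

webtransport_handle({datagram, Data}, State) ->
    %% Echo the datagram back to the peer.
    {[{send, datagram, Data}], State};
webtransport_handle(_Event, State) ->
    {[], State}.

webtransport_info(_Info, State) ->
    {[], State}.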
diff --git a/test/http2_SUITE.erl b/test/http2_SUITE.erl index fe6325d..6f2d020 100644 --- a/test/http2_SUITE.erl +++ b/test/http2_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -29,7 +29,8 @@ init_dispatch(_) -> cowboy_router:compile([{"localhost", [ {"/", hello_h, []}, {"/echo/:key", echo_h, []}, - {"/resp_iolist_body", resp_iolist_body_h, []} + {"/resp_iolist_body", resp_iolist_body_h, []}, + {"/streamed_result/:n/:interval", streamed_result_h, []} ]}]). %% Do a prior knowledge handshake (function originally copied from rfc7540_SUITE). @@ -37,7 +38,8 @@ do_handshake(Config) -> do_handshake(#{}, Config). do_handshake(Settings, Config) -> - {ok, Socket} = gen_tcp:connect("localhost", config(port, Config), [binary, {active, false}]), + {ok, Socket} = gen_tcp:connect("localhost", config(port, Config), + [binary, {active, false}|proplists:get_value(tcp_opts, Config, [])]), %% Send a valid preface. ok = gen_tcp:send(Socket, ["PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", cow_http2:settings(Settings)]), %% Receive the server preface. @@ -49,6 +51,27 @@ do_handshake(Settings, Config) -> {ok, << 0:24, 4:8, 1:8, 0:32 >>} = gen_tcp:recv(Socket, 9, 1000), {ok, Socket}. +hibernate(Config) -> + doc("Ensure that we can enable hibernation for HTTP/1.1 connections."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + hibernate => true + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, http2}, {port, Port}|Config]), + {ok, http2} = gun:await_up(ConnPid), + StreamRef1 = gun:get(ConnPid, "/"), + StreamRef2 = gun:get(ConnPid, "/"), + StreamRef3 = gun:get(ConnPid, "/"), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef1), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef2), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef3), + gun:close(ConnPid) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + idle_timeout(Config) -> doc("Terminate when the idle timeout is reached."), ProtoOpts = #{ @@ -61,7 +84,8 @@ idle_timeout(Config) -> {ok, Socket} = do_handshake([{port, Port}|Config]), timer:sleep(1000), %% Receive a GOAWAY frame back with NO_ERROR. - {ok, << _:24, 7:8, _:72, 0:32 >>} = gen_tcp:recv(Socket, 17, 1000) + {ok, << _:24, 7:8, _:72, 0:32 >>} = gen_tcp:recv(Socket, 17, 1000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -78,7 +102,8 @@ idle_timeout_infinity(Config) -> {ok, Socket} = do_handshake([{port, Port}|Config]), timer:sleep(1000), %% Don't receive a GOAWAY frame. - {error, timeout} = gen_tcp:recv(Socket, 17, 1000) + {error, timeout} = gen_tcp:recv(Socket, 17, 1000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -107,11 +132,21 @@ idle_timeout_reset_on_data(Config) -> {ok, <<8:24, 6:8, 0:7, 1:1, 0:96>>} = gen_tcp:recv(Socket, 17, 1000), %% The connection goes away soon after we stop sending data. timer:sleep(1000), - {ok, << _:24, 7:8, _:72, 0:32 >>} = gen_tcp:recv(Socket, 17, 1000) + {ok, << _:24, 7:8, _:72, 0:32 >>} = gen_tcp:recv(Socket, 17, 1000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. 
+idle_timeout_on_send(Config) -> + doc("Ensure the idle timeout is not reset when sending (by default)."), + http_SUITE:do_idle_timeout_on_send(Config, http2). + +idle_timeout_reset_on_send(Config) -> + doc("Ensure the reset_idle_timeout_on_send results in the " + "idle timeout resetting when sending ."), + http_SUITE:do_idle_timeout_reset_on_send(Config, http2). + inactivity_timeout(Config) -> doc("Terminate when the inactivity timeout is reached."), ProtoOpts = #{ @@ -124,7 +159,8 @@ inactivity_timeout(Config) -> {ok, Socket} = do_handshake([{port, Port}|Config]), receive after 1000 -> ok end, %% Receive a GOAWAY frame back with an INTERNAL_ERROR. - {ok, << _:24, 7:8, _:72, 2:32 >>} = gen_tcp:recv(Socket, 17, 1000) + {ok, << _:24, 7:8, _:72, 2:32 >>} = gen_tcp:recv(Socket, 17, 1000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -148,7 +184,8 @@ initial_connection_window_size(Config) -> {ok, << 4:8, 0:40, _:Len/binary >>} = gen_tcp:recv(Socket, 6 + Len, 1000), %% Receive a WINDOW_UPDATE frame incrementing the connection window to 100000. {ok, <<4:24, 8:8, 0:41, Size:31>>} = gen_tcp:recv(Socket, 13, 1000), - ConfiguredSize = Size + 65535 + ConfiguredSize = Size + 65535, + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -191,7 +228,8 @@ max_frame_size_sent(Config) -> %% The DATA frames following must have lengths of 20000 %% and then 10000 due to the limit. {ok, <<20000:24, 0:8, _:40, _:20000/unit:8>>} = gen_tcp:recv(Socket, 20009, 6000), - {ok, <<10000:24, 0:8, _:40, _:10000/unit:8>>} = gen_tcp:recv(Socket, 10009, 6000) + {ok, <<10000:24, 0:8, _:40, _:10000/unit:8>>} = gen_tcp:recv(Socket, 10009, 6000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -235,7 +273,7 @@ preface_timeout_infinity(Config) -> {'DOWN', Ref, process, Pid, Reason} -> error(Reason) after 1000 -> - ok + gen_tcp:close(Socket) end after cowboy:stop_listener(?FUNCTION_NAME) @@ -279,7 +317,7 @@ settings_timeout_infinity(Config) -> {'DOWN', Ref, process, Pid, Reason} -> error(Reason) after 1000 -> - ok + gen_tcp:close(Socket) end after cowboy:stop_listener(?FUNCTION_NAME) @@ -365,6 +403,10 @@ graceful_shutdown_timeout(Config) -> graceful_shutdown_listener(Config) -> doc("Check that connections are shut down gracefully when stopping a listener."), + TransOpts = #{ + socket_opts => [{port, 0}], + shutdown => 1000 %% Shorter timeout to make the test case faster. + }, Dispatch = cowboy_router:compile([{"localhost", [ {"/delay_hello", delay_hello_h, #{delay => 500, notify_received => self()}} @@ -372,13 +414,15 @@ graceful_shutdown_listener(Config) -> ProtoOpts = #{ env => #{dispatch => Dispatch} }, - {ok, Listener} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], ProtoOpts), + {ok, Listener} = cowboy:start_clear(?FUNCTION_NAME, TransOpts, ProtoOpts), Port = ranch:get_port(?FUNCTION_NAME), ConnPid = gun_open([{type, tcp}, {protocol, http2}, {port, Port}|Config]), Ref = gun:get(ConnPid, "/delay_hello"), %% Shutdown listener while the handlers are working. receive {request_received, <<"/delay_hello">>} -> ok end, ListenerMonitorRef = monitor(process, Listener), + %% Note: This call does not complete quickly and will + %% prevent other cowboy:stop_listener/1 calls to complete. 
ok = cowboy:stop_listener(?FUNCTION_NAME), receive {'DOWN', ListenerMonitorRef, process, Listener, _Reason} -> @@ -392,6 +436,10 @@ graceful_shutdown_listener(Config) -> graceful_shutdown_listener_timeout(Config) -> doc("Check that connections are shut down when gracefully stopping a listener times out."), + TransOpts = #{ + socket_opts => [{port, 0}], + shutdown => 1000 %% Shorter timeout to make the test case faster. + }, Dispatch = cowboy_router:compile([{"localhost", [ {"/long_delay_hello", delay_hello_h, #{delay => 10000, notify_received => self()}} @@ -401,13 +449,15 @@ graceful_shutdown_listener_timeout(Config) -> goaway_initial_timeout => 200, goaway_complete_timeout => 500 }, - {ok, Listener} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], ProtoOpts), + {ok, Listener} = cowboy:start_clear(?FUNCTION_NAME, TransOpts, ProtoOpts), Port = ranch:get_port(?FUNCTION_NAME), ConnPid = gun_open([{type, tcp}, {protocol, http2}, {port, Port}|Config]), Ref = gun:get(ConnPid, "/long_delay_hello"), %% Shutdown listener while the handlers are working. receive {request_received, <<"/long_delay_hello">>} -> ok end, ListenerMonitorRef = monitor(process, Listener), + %% Note: This call does not complete quickly and will + %% prevent other cowboy:stop_listener/1 calls to complete. ok = cowboy:stop_listener(?FUNCTION_NAME), receive {'DOWN', ListenerMonitorRef, process, Listener, _Reason} -> @@ -416,3 +466,73 @@ graceful_shutdown_listener_timeout(Config) -> %% Check that the slow request is aborted. {error, {stream_error, closed}} = gun:await(ConnPid, Ref), gun:close(ConnPid). + +send_timeout_close(Config) -> + doc("Check that connections are closed on send timeout."), + TransOpts = #{ + socket_opts => [ + {port, 0}, + {send_timeout, 100}, + {send_timeout_close, true}, + {sndbuf, 10} + ] + }, + Dispatch = cowboy_router:compile([{"localhost", [ + {"/endless", loop_handler_endless_h, #{delay => 100}} + ]}]), + ProtoOpts = #{ + env => #{dispatch => Dispatch}, + idle_timeout => infinity + }, + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, TransOpts, ProtoOpts), + Port = ranch:get_port(?FUNCTION_NAME), + try + %% Connect a client that sends a request and waits indefinitely. + {ok, ClientSocket} = do_handshake([{port, Port}, + {tcp_opts, [{recbuf, 10}, {buffer, 10}, {active, false}]}|Config]), + {HeadersBlock, _} = cow_hpack:encode([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>}, %% @todo Correct port number. + {<<":path">>, <<"/endless">>}, + {<<"x-test-pid">>, pid_to_list(self())} + ]), + ok = gen_tcp:send(ClientSocket, [ + cow_http2:headers(1, fin, HeadersBlock), + %% Greatly increase the window to make sure we don't run + %% out of space before we get send timeouts. + cow_http2:window_update(10000000), + cow_http2:window_update(1, 10000000) + ]), + %% Wait for the handler to start then get its pid, + %% the remote connection's pid and socket. + StreamPid = receive + {Self, StreamPid0, init} when Self =:= self() -> + StreamPid0 + after 1000 -> + error(timeout) + end, + ServerPid = ct_helper:get_remote_pid_tcp(ClientSocket), + {links, ServerLinks} = process_info(ServerPid, links), + [ServerSocket] = [PidOrPort || PidOrPort <- ServerLinks, is_port(PidOrPort)], + %% Poll the socket repeatedly until it is closed by the server. 
+ WaitClosedFun = + fun F(T) when T =< 0 -> + error({status, prim_inet:getstatus(ServerSocket)}); + F(T) -> + Snooze = 100, + case inet:sockname(ServerSocket) of + {error, _} -> + timer:sleep(Snooze); + {ok, _} -> + timer:sleep(Snooze), + F(T - Snooze) + end + end, + ok = WaitClosedFun(2000), + false = erlang:is_process_alive(StreamPid), + false = erlang:is_process_alive(ServerPid), + gen_tcp:close(ClientSocket) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. diff --git a/test/http_SUITE.erl b/test/http_SUITE.erl index d0c92e4..9928136 100644 --- a/test/http_SUITE.erl +++ b/test/http_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -24,12 +24,20 @@ -import(cowboy_test, [raw_open/1]). -import(cowboy_test, [raw_send/2]). -import(cowboy_test, [raw_recv_head/1]). +-import(cowboy_test, [raw_recv_rest/3]). -import(cowboy_test, [raw_recv/3]). -import(cowboy_test, [raw_expect_recv/2]). -all() -> [{group, clear}]. +all() -> + [{group, clear_no_parallel}, {group, clear}]. -groups() -> [{clear, [parallel], ct_helper:all(?MODULE)}]. +groups() -> + [ + %% cowboy:stop_listener can be slow when called many times + %% in parallel so we must run this test separately from the others. + {clear_no_parallel, [], [graceful_shutdown_listener]}, + {clear, [parallel], ct_helper:all(?MODULE) -- [graceful_shutdown_listener]} + ]. init_per_group(Name, Config) -> cowboy_test:init_http(Name, #{ @@ -42,9 +50,11 @@ end_per_group(Name, _) -> init_dispatch(_) -> cowboy_router:compile([{"localhost", [ {"/", hello_h, []}, + {"/delay_hello", delay_hello_h, #{delay => 1000, notify_received => self()}}, {"/echo/:key", echo_h, []}, {"/resp/:key[/:arg]", resp_h, []}, - {"/set_options/:key", set_options_h, []} + {"/set_options/:key", set_options_h, []}, + {"/streamed_result/:n/:interval", streamed_result_h, []} ]}]). chunked_false(Config) -> @@ -88,7 +98,7 @@ chunked_one_byte_at_a_time(Config) -> "Transfer-encoding: chunked\r\n\r\n"), _ = [begin raw_send(Client, <<C>>), - timer:sleep(10) + timer:sleep(1) end || <<C>> <= ChunkedBody], Rest = case catch raw_recv_head(Client) of {'EXIT', _} -> error(closed); @@ -196,6 +206,94 @@ do_chunked_body(ChunkSize0, Data, Acc) -> do_chunked_body(ChunkSize, Rest, [iolist_to_binary(cow_http_te:chunk(Chunk))|Acc]). +disable_http1_tls(Config) -> + doc("Ensure that we can disable HTTP/1.1 over TLS (force HTTP/2)."), + TlsOpts = ct_helper:get_certs_from_ets(), + {ok, _} = cowboy:start_tls(?FUNCTION_NAME, TlsOpts ++ [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + alpn_default_protocol => http2 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + {ok, Socket} = ssl:connect("localhost", Port, + [binary, {active, false}|TlsOpts]), + %% ALPN was not negotiated but we're still over HTTP/2. + {error, protocol_not_negotiated} = ssl:negotiated_protocol(Socket), + %% Send a valid preface. + ok = ssl:send(Socket, [ + "PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", + cow_http2:settings(#{})]), + %% Receive the server preface. + {ok, << Len:24 >>} = ssl:recv(Socket, 3, 1000), + {ok, << 4:8, 0:40, _:Len/binary >>} = ssl:recv(Socket, 6 + Len, 1000), + ok + after + cowboy:stop_listener(?FUNCTION_NAME) + end. 
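Note on the disable_http1_tls test above: it exercises the alpn_default_protocol option, which selects the protocol used when a TLS client performs no ALPN negotiation. A minimal sketch of a listener configured that way follows; the module name, port and certificate paths are illustrative assumptions, and hello_h is the trivial handler from the test suite.

-module(h2_only_tls_example).
-export([start/0]).

%% Sketch: start a TLS listener that falls back to HTTP/2 (instead of
%% HTTP/1.1) when the client does not negotiate ALPN, as exercised by
%% disable_http1_tls above. Certificate paths are placeholders.
start() ->
    Dispatch = cowboy_router:compile([
        {'_', [{"/", hello_h, []}]}
    ]),
    {ok, _} = cowboy:start_tls(h2_only_tls_example, [
        {port, 8443},
        {certfile, "/path/to/cert.pem"},
        {keyfile, "/path/to/key.pem"}
    ], #{
        env => #{dispatch => Dispatch},
        %% Without this option the connection would default to HTTP/1.1.
        alpn_default_protocol => http2
    }),
    ok.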
+ +disable_http2_prior_knowledge(Config) -> + doc("Ensure that we can disable prior knowledge HTTP/2 upgrade."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + protocols => [http] + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + {ok, Socket} = gen_tcp:connect("localhost", Port, [binary, {active, false}]), + %% Send a valid preface. + ok = gen_tcp:send(Socket, [ + "PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", + cow_http2:settings(#{})]), + {ok, <<"HTTP/1.1 501">>} = gen_tcp:recv(Socket, 12, 1000), + ok + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +disable_http2_upgrade(Config) -> + doc("Ensure that we can disable HTTP/1.1 upgrade to HTTP/2."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + protocols => [http] + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + {ok, Socket} = gen_tcp:connect("localhost", Port, [binary, {active, false}]), + %% Send a valid preface. + ok = gen_tcp:send(Socket, [ + "GET / HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade, HTTP2-Settings\r\n" + "Upgrade: h2c\r\n" + "HTTP2-Settings: ", base64:encode(cow_http2:settings_payload(#{})), "\r\n", + "\r\n"]), + {ok, <<"HTTP/1.1 200">>} = gen_tcp:recv(Socket, 12, 1000), + ok + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +hibernate(Config) -> + doc("Ensure that we can enable hibernation for HTTP/1.1 connections."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + hibernate => true + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, http}, {port, Port}|Config]), + {ok, http} = gun:await_up(ConnPid), + StreamRef1 = gun:get(ConnPid, "/"), + StreamRef2 = gun:get(ConnPid, "/"), + StreamRef3 = gun:get(ConnPid, "/"), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef1), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef2), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef3), + gun:close(ConnPid) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + http10_keepalive_false(Config) -> doc("Confirm the option http10_keepalive => false disables keep-alive " "completely for HTTP/1.0 connections."), @@ -225,6 +323,68 @@ http10_keepalive_false(Config) -> cowboy:stop_listener(?FUNCTION_NAME) end. +idle_timeout_read_body(Config) -> + doc("Ensure the idle_timeout drops connections when the " + "connection is idle too long reading the request body."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + request_timeout => 60000, + idle_timeout => 500 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, http}, {port, Port}|Config]), + {ok, http} = gun:await_up(ConnPid), + _StreamRef = gun:post(ConnPid, "/echo/read_body", + #{<<"content-length">> => <<"12">>}), + {error, {down, {shutdown, closed}}} = gun:await(ConnPid, undefined, 1000) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. 
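The disable_http2_prior_knowledge and disable_http2_upgrade tests above both drive the protocols option, which restricts which protocols a clear listener accepts. A hedged sketch of an HTTP/1.1-only listener, with illustrative names:

-module(http1_only_example).
-export([start/0]).

%% Sketch: restrict a clear listener to HTTP/1.1 only. With this
%% configuration a prior knowledge HTTP/2 preface is answered with a
%% 501, and an "h2c" Upgrade request is simply served over HTTP/1.1,
%% matching the expectations of the tests above.
start() ->
    Dispatch = cowboy_router:compile([
        {'_', [{"/", hello_h, []}]}
    ]),
    {ok, _} = cowboy:start_clear(http1_only_example, [{port, 8080}], #{
        env => #{dispatch => Dispatch},
        protocols => [http]
    }),
    ok.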
+ +idle_timeout_read_body_pipeline(Config) -> + doc("Ensure the idle_timeout drops connections when the " + "connection is idle too long reading the request body."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + request_timeout => 60000, + idle_timeout => 500 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, http}, {port, Port}|Config]), + {ok, http} = gun:await_up(ConnPid), + StreamRef1 = gun:get(ConnPid, "/"), + StreamRef2 = gun:get(ConnPid, "/"), + _StreamRef3 = gun:post(ConnPid, "/echo/read_body", + #{<<"content-length">> => <<"12">>}), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef1), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef2), + {error, {down, {shutdown, closed}}} = gun:await(ConnPid, undefined, 1000) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +idle_timeout_skip_body(Config) -> + doc("Ensure the idle_timeout drops connections when the " + "connection is idle too long skipping the request body."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + request_timeout => 60000, + idle_timeout => 500 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, http}, {port, Port}|Config]), + {ok, http} = gun:await_up(ConnPid), + StreamRef = gun:post(ConnPid, "/", + #{<<"content-length">> => <<"12">>}), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef), + {error, {down, {shutdown, closed}}} = gun:await(ConnPid, undefined, 1000) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + idle_timeout_infinity(Config) -> doc("Ensure the idle_timeout option accepts the infinity value."), {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ @@ -245,12 +405,90 @@ idle_timeout_infinity(Config) -> {'DOWN', Ref, process, Pid, Reason} -> error(Reason) after 1000 -> - ok + gun:close(ConnPid) end after cowboy:stop_listener(?FUNCTION_NAME) end. +idle_timeout_on_send(Config) -> + doc("Ensure the idle timeout is not reset when sending (by default)."), + do_idle_timeout_on_send(Config, http). + +%% Also used by http2_SUITE. +do_idle_timeout_on_send(Config, Protocol) -> + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + idle_timeout => 1000 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, Protocol}, {port, Port}|Config]), + {ok, Protocol} = gun:await_up(ConnPid), + timer:sleep(500), + #{socket := Socket} = gun:info(ConnPid), + Pid = get_remote_pid_tcp(Socket), + StreamRef = gun:get(ConnPid, "/streamed_result/10/250"), + Ref = erlang:monitor(process, Pid), + receive + {gun_response, ConnPid, StreamRef, nofin, _Status, _Headers} -> + do_idle_timeout_recv_loop(Ref, Pid, ConnPid, StreamRef, false) + after 2000 -> + error(timeout) + end + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +idle_timeout_reset_on_send(Config) -> + doc("Ensure the reset_idle_timeout_on_send results in the " + "idle timeout resetting when sending ."), + do_idle_timeout_reset_on_send(Config, http). + +%% Also used by http2_SUITE. 
+do_idle_timeout_reset_on_send(Config, Protocol) -> + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + idle_timeout => 1000, + reset_idle_timeout_on_send => true + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, Protocol}, {port, Port}|Config]), + {ok, Protocol} = gun:await_up(ConnPid), + timer:sleep(500), + #{socket := Socket} = gun:info(ConnPid), + Pid = get_remote_pid_tcp(Socket), + StreamRef = gun:get(ConnPid, "/streamed_result/10/250"), + Ref = erlang:monitor(process, Pid), + receive + {gun_response, ConnPid, StreamRef, nofin, _Status, _Headers} -> + do_idle_timeout_recv_loop(Ref, Pid, ConnPid, StreamRef, true) + after 2000 -> + error(timeout) + end + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +do_idle_timeout_recv_loop(Ref, Pid, ConnPid, StreamRef, ExpectCompletion) -> + receive + {gun_data, ConnPid, StreamRef, nofin, _Data} -> + do_idle_timeout_recv_loop(Ref, Pid, ConnPid, StreamRef, ExpectCompletion); + {gun_data, ConnPid, StreamRef, fin, _Data} when ExpectCompletion -> + gun:close(ConnPid); + {gun_data, ConnPid, StreamRef, fin, _Data} -> + gun:close(ConnPid), + error(completed); + {'DOWN', Ref, process, Pid, _} when ExpectCompletion -> + gun:close(ConnPid), + error(exited); + {'DOWN', Ref, process, Pid, _} -> + ok + after 2000 -> + error(timeout) + end. + persistent_term_router(Config) -> doc("The router can retrieve the routes from persistent_term storage."), case erlang:function_exported(persistent_term, get, 1) of @@ -274,6 +512,133 @@ do_persistent_term_router(Config) -> cowboy:stop_listener(?FUNCTION_NAME) end. +request_timeout(Config) -> + doc("Ensure the request_timeout drops connections when requests " + "fail to come in fast enough."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + request_timeout => 500 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, http}, {port, Port}|Config]), + {ok, http} = gun:await_up(ConnPid), + {error, {down, {shutdown, closed}}} = gun:await(ConnPid, undefined, 1000) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +request_timeout_pipeline(Config) -> + doc("Ensure the request_timeout drops connections when requests " + "fail to come in fast enough after pipelined requests went through."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + request_timeout => 500 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, http}, {port, Port}|Config]), + {ok, http} = gun:await_up(ConnPid), + StreamRef1 = gun:get(ConnPid, "/"), + StreamRef2 = gun:get(ConnPid, "/"), + StreamRef3 = gun:get(ConnPid, "/"), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef1), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef2), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef3), + {error, {down, {shutdown, closed}}} = gun:await(ConnPid, undefined, 1000) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. 
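do_idle_timeout_on_send and do_idle_timeout_reset_on_send contrast the default behavior, where only received data resets idle_timeout, with the new reset_idle_timeout_on_send option. A sketch of enabling it is below; the listener name and timeout value are illustrative, and streamed_result_h is the streaming handler from the test suite.

-module(reset_idle_on_send_example).
-export([start/0]).

%% Sketch: with reset_idle_timeout_on_send => true, a connection that
%% only sends data (for example a long streamed response) is no longer
%% dropped by idle_timeout, which is the behavior verified above.
start() ->
    Dispatch = cowboy_router:compile([
        {'_', [{"/streamed_result/:n/:interval", streamed_result_h, []}]}
    ]),
    {ok, _} = cowboy:start_clear(reset_idle_on_send_example, [{port, 8080}], #{
        env => #{dispatch => Dispatch},
        idle_timeout => 1000,
        reset_idle_timeout_on_send => true
    }),
    ok.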
+ +request_timeout_pipeline_delay(Config) -> + doc("Ensure the request_timeout does not trigger on requests " + "coming in after a large request body."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + request_timeout => 500 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + ConnPid = gun_open([{type, tcp}, {protocol, http}, {port, Port}|Config]), + {ok, http} = gun:await_up(ConnPid), + StreamRef1 = gun:post(ConnPid, "/", #{}, <<0:8000000>>), + StreamRef2 = gun:get(ConnPid, "/delay_hello"), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef1), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef2), + {error, {down, {shutdown, closed}}} = gun:await(ConnPid, undefined, 1000) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +request_timeout_skip_body(Config) -> + doc("Ensure the request_timeout drops connections when requests " + "fail to come in fast enough after skipping a request body."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + request_timeout => 500 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + Client = raw_open([{type, tcp}, {port, Port}, {opts, []}|Config]), + ok = raw_send(Client, << + "POST / HTTP/1.1\r\n" + "host: localhost\r\n" + "content-length: 12\r\n\r\n" + >>), + Data = raw_recv_head(Client), + {'HTTP/1.1', 200, _, Rest0} = cow_http:parse_status_line(Data), + {Headers, Rest} = cow_http:parse_headers(Rest0), + {_, Len} = lists:keyfind(<<"content-length">>, 1, Headers), + <<"Hello world!">> = raw_recv_rest(Client, binary_to_integer(Len), Rest), + %% We then send the request data that should be skipped by Cowboy. + timer:sleep(100), + raw_send(Client, <<"Hello world!">>), + %% Connection should be closed by the request_timeout after that. + {error, closed} = raw_recv(Client, 1, 1000) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + +request_timeout_skip_body_more(Config) -> + doc("Ensure the request_timeout drops connections when requests " + "fail to come in fast enough after skipping a request body."), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ + env => #{dispatch => init_dispatch(Config)}, + request_timeout => 500 + }), + Port = ranch:get_port(?FUNCTION_NAME), + try + Client = raw_open([{type, tcp}, {port, Port}, {opts, []}|Config]), + ok = raw_send(Client, << + "POST / HTTP/1.1\r\n" + "host: localhost\r\n" + "content-length: 12\r\n\r\n" + >>), + Data = raw_recv_head(Client), + {'HTTP/1.1', 200, _, Rest0} = cow_http:parse_status_line(Data), + {Headers, Rest} = cow_http:parse_headers(Rest0), + {_, Len} = lists:keyfind(<<"content-length">>, 1, Headers), + <<"Hello world!">> = raw_recv_rest(Client, binary_to_integer(Len), Rest), + %% We then send the request data that should be skipped by Cowboy. + timer:sleep(100), + raw_send(Client, <<"Hello world!">>), + %% Send the start of another request. + ok = raw_send(Client, << + "GET / HTTP/1.1\r\n" + "host: localhost\r\n" + %% Missing final \r\n on purpose. + >>), + %% Connection should be closed by the request_timeout after that. + %% We attempt to send a 408 response on a best effort basis so + %% that is accepted as well. + case raw_recv(Client, 13, 1000) of + {error, closed} -> ok; + {ok, <<"HTTP/1.1 408 ", _/bits>>} -> ok + end + after + cowboy:stop_listener(?FUNCTION_NAME) + end. 
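The request_timeout tests above cover the different ways a connection can go quiet: fully idle, after pipelined requests, or after a skipped request body, with a best-effort 408 possible in the last case. A minimal client-side sketch that reproduces the idle case over a raw socket; host, port and timeout are assumptions.

-module(request_timeout_probe).
-export([probe/2]).

%% Sketch: open a connection, send nothing, and observe the server
%% closing it once request_timeout expires. Depending on timing the
%% server may first emit a best-effort "HTTP/1.1 408" before closing.
probe(Host, Port) ->
    {ok, Socket} = gen_tcp:connect(Host, Port, [binary, {active, false}]),
    Result = case gen_tcp:recv(Socket, 0, 2000) of
        {error, closed} -> closed_without_response;
        {error, timeout} -> still_open;
        {ok, <<"HTTP/1.1 408", _/bits>>} -> closed_with_408;
        {ok, Other} -> {unexpected, Other}
    end,
    gen_tcp:close(Socket),
    Result.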
+ request_timeout_infinity(Config) -> doc("Ensure the request_timeout option accepts the infinity value."), {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], #{ @@ -292,7 +657,7 @@ request_timeout_infinity(Config) -> {'DOWN', Ref, process, Pid, Reason} -> error(Reason) after 1000 -> - ok + gun:close(ConnPid) end after cowboy:stop_listener(?FUNCTION_NAME) @@ -348,7 +713,8 @@ set_options_chunked_false_ignored(Config) -> %% is not disabled for that second request. StreamRef2 = gun:get(ConnPid, "/resp/stream_reply2/200"), {response, nofin, 200, Headers} = gun:await(ConnPid, StreamRef2), - {_, <<"chunked">>} = lists:keyfind(<<"transfer-encoding">>, 1, Headers) + {_, <<"chunked">>} = lists:keyfind(<<"transfer-encoding">>, 1, Headers), + gun:close(ConnPid) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -449,10 +815,10 @@ graceful_shutdown_connection(Config) -> doc("Check that the current request is handled before gracefully " "shutting down a connection."), Dispatch = cowboy_router:compile([{"localhost", [ + {"/hello", delay_hello_h, + #{delay => 0, notify_received => self()}}, {"/delay_hello", delay_hello_h, - #{delay => 500, notify_received => self()}}, - {"/long_delay_hello", delay_hello_h, - #{delay => 10000, notify_received => self()}} + #{delay => 1000, notify_received => self()}} ]}]), ProtoOpts = #{ env => #{dispatch => Dispatch} @@ -460,22 +826,27 @@ graceful_shutdown_connection(Config) -> {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], ProtoOpts), Port = ranch:get_port(?FUNCTION_NAME), try - ConnPid = gun_open([{type, tcp}, {protocol, http}, {port, Port}|Config]), - {ok, http} = gun:await_up(ConnPid), - #{socket := Socket} = gun:info(ConnPid), - CowboyConnPid = get_remote_pid_tcp(Socket), - CowboyConnRef = erlang:monitor(process, CowboyConnPid), - Ref1 = gun:get(ConnPid, "/delay_hello"), - Ref2 = gun:get(ConnPid, "/delay_hello"), - receive {request_received, <<"/delay_hello">>} -> ok end, + Client = raw_open([{type, tcp}, {port, Port}, {opts, []}|Config]), + ok = raw_send(Client, + "GET /delay_hello HTTP/1.1\r\n" + "Host: localhost\r\n\r\n" + "GET /hello HTTP/1.1\r\n" + "Host: localhost\r\n\r\n"), receive {request_received, <<"/delay_hello">>} -> ok end, + receive {request_received, <<"/hello">>} -> ok end, + CowboyConnPid = get_remote_pid_tcp(element(2, Client)), + CowboyConnRef = erlang:monitor(process, CowboyConnPid), ok = sys:terminate(CowboyConnPid, system_is_going_down), - {response, nofin, 200, RespHeaders} = gun:await(ConnPid, Ref1), - <<"close">> = proplists:get_value(<<"connection">>, RespHeaders), - {ok, RespBody} = gun:await_body(ConnPid, Ref1), - <<"Hello world!">> = iolist_to_binary(RespBody), - {error, {stream_error, _}} = gun:await(ConnPid, Ref2), - ok = gun_down(ConnPid), + Rest = case catch raw_recv_head(Client) of + {'EXIT', _} -> error(closed); + Data -> + {'HTTP/1.1', 200, _, Rest0} = cow_http:parse_status_line(Data), + {Headers, Rest1} = cow_http:parse_headers(Rest0), + <<"close">> = proplists:get_value(<<"connection">>, Headers), + Rest1 + end, + <<"Hello world!">> = raw_recv_rest(Client, byte_size(<<"Hello world!">>), Rest), + {error, closed} = raw_recv(Client, 0, 1000), receive {'DOWN', CowboyConnRef, process, CowboyConnPid, _Reason} -> ok @@ -486,6 +857,10 @@ graceful_shutdown_connection(Config) -> graceful_shutdown_listener(Config) -> doc("Check that connections are shut down gracefully when stopping a listener."), + TransOpts = #{ + socket_opts => [{port, 0}], + shutdown => 1000 %% Shorter timeout to make the test case faster. 
+ }, Dispatch = cowboy_router:compile([{"localhost", [ {"/delay_hello", delay_hello_h, #{delay => 500, notify_received => self()}}, @@ -495,7 +870,7 @@ graceful_shutdown_listener(Config) -> ProtoOpts = #{ env => #{dispatch => Dispatch} }, - {ok, _} = cowboy:start_clear(?FUNCTION_NAME, [{port, 0}], ProtoOpts), + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, TransOpts, ProtoOpts), Port = ranch:get_port(?FUNCTION_NAME), ConnPid1 = gun_open([{type, tcp}, {protocol, http}, {port, Port}|Config]), Ref1 = gun:get(ConnPid1, "/delay_hello"), @@ -504,6 +879,8 @@ graceful_shutdown_listener(Config) -> %% Shutdown listener while the handlers are working. receive {request_received, <<"/delay_hello">>} -> ok end, receive {request_received, <<"/long_delay_hello">>} -> ok end, + %% Note: This call does not complete quickly and will + %% prevent other cowboy:stop_listener/1 calls to complete. ok = cowboy:stop_listener(?FUNCTION_NAME), %% Check that the 1st request is handled before shutting down. {response, nofin, 200, RespHeaders} = gun:await(ConnPid1, Ref1), @@ -514,3 +891,63 @@ graceful_shutdown_listener(Config) -> %% Check that the 2nd (very slow) request is not handled. {error, {stream_error, closed}} = gun:await(ConnPid2, Ref2), gun:close(ConnPid2). + +send_timeout_close(_Config) -> + doc("Check that connections are closed on send timeout."), + TransOpts = #{ + socket_opts => [ + {port, 0}, + {send_timeout, 100}, + {send_timeout_close, true}, + {sndbuf, 10} + ] + }, + Dispatch = cowboy_router:compile([{"localhost", [ + {"/endless", loop_handler_endless_h, #{delay => 100}} + ]}]), + ProtoOpts = #{ + env => #{dispatch => Dispatch}, + idle_timeout => infinity + }, + {ok, _} = cowboy:start_clear(?FUNCTION_NAME, TransOpts, ProtoOpts), + Port = ranch:get_port(?FUNCTION_NAME), + try + %% Connect a client that sends a request and waits indefinitely. + {ok, ClientSocket} = gen_tcp:connect("localhost", Port, + [{recbuf, 10}, {buffer, 10}, {active, false}, {packet, 0}]), + ok = gen_tcp:send(ClientSocket, [ + "GET /endless HTTP/1.1\r\n", + "Host: localhost:", integer_to_list(Port), "\r\n", + "x-test-pid: ", pid_to_list(self()), "\r\n\r\n" + ]), + %% Wait for the handler to start then get its pid, + %% the remote connection's pid and socket. + StreamPid = receive + {Self, StreamPid0, init} when Self =:= self() -> + StreamPid0 + after 1000 -> + error(timeout) + end, + ServerPid = ct_helper:get_remote_pid_tcp(ClientSocket), + {links, ServerLinks} = process_info(ServerPid, links), + [ServerSocket] = [PidOrPort || PidOrPort <- ServerLinks, is_port(PidOrPort)], + %% Poll the socket repeatedly until it is closed by the server. + WaitClosedFun = + fun F(T) when T =< 0 -> + error({status, prim_inet:getstatus(ServerSocket)}); + F(T) -> + Snooze = 100, + case inet:sockname(ServerSocket) of + {error, _} -> + timer:sleep(Snooze); + {ok, _} -> + timer:sleep(Snooze), + F(T - Snooze) + end + end, + ok = WaitClosedFun(2000), + false = erlang:is_process_alive(StreamPid), + false = erlang:is_process_alive(ServerPid) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. diff --git a/test/http_perf_SUITE.erl b/test/http_perf_SUITE.erl new file mode 100644 index 0000000..1484c03 --- /dev/null +++ b/test/http_perf_SUITE.erl @@ -0,0 +1,220 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. 
+%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(http_perf_SUITE). +-compile(export_all). +-compile(nowarn_export_all). + +-import(ct_helper, [config/2]). +-import(ct_helper, [doc/1]). +-import(cowboy_test, [gun_open/1]). + +%% ct. + +all() -> + %% @todo Enable HTTP/3 for this test suite. + cowboy_test:common_all() -- [{group, h3}, {group, h3_compress}]. + +groups() -> + cowboy_test:common_groups(ct_helper:all(?MODULE), no_parallel). + +init_per_suite(Config) -> + do_log("", []), + %% Optionally enable `perf` for the current node. +% spawn(fun() -> ct:pal(os:cmd("perf record -g -F 9999 -o /tmp/http_perf.data -p " ++ os:getpid() ++ " -- sleep 60")) end), + Config. + +end_per_suite(_) -> + ok. + +init_per_group(Name, Config) -> + [{group, Name}|cowboy_test:init_common_groups(Name, Config, ?MODULE, #{ + %% HTTP/1.1 + max_keepalive => infinity, + %% HTTP/2 + %% @todo Must configure Gun for performance too. + connection_window_margin_size => 64*1024, + enable_connect_protocol => true, + env => #{dispatch => init_dispatch(Config)}, + max_frame_size_sent => 64*1024, + max_frame_size_received => 16384 * 1024 - 1, + max_received_frame_rate => {10_000_000, 1}, + stream_window_data_threshold => 1024, + stream_window_margin_size => 64*1024 + })]. + +end_per_group(Name, _) -> + do_log("", []), + cowboy_test:stop_group(Name). + +%% Routes. + +init_dispatch(_) -> + cowboy_router:compile([{'_', [ + {"/", hello_h, []}, + {"/read_body", read_body_h, []} + ]}]). + +%% Tests: Hello world. + +plain_h_hello_1(Config) -> + doc("Plain HTTP handler Hello World; 10K requests per 1 client."), + do_bench_get(?FUNCTION_NAME, "/", #{}, 1, 10000, Config). + +plain_h_hello_10(Config) -> + doc("Plain HTTP handler Hello World; 10K requests per 10 clients."), + do_bench_get(?FUNCTION_NAME, "/", #{}, 10, 10000, Config). + +stream_h_hello_1(Config) -> + doc("Stream handler Hello World; 10K requests per 1 client."), + do_stream_h_hello(Config, 1). + +stream_h_hello_10(Config) -> + doc("Stream handler Hello World; 10K requests per 10 clients."), + do_stream_h_hello(Config, 10). + +do_stream_h_hello(Config, NumClients) -> + Ref = config(ref, Config), + ProtoOpts = ranch:get_protocol_options(Ref), + StreamHandlers = case ProtoOpts of + #{stream_handlers := _} -> [cowboy_compress_h, stream_hello_h]; + _ -> [stream_hello_h] + end, + ranch:set_protocol_options(Ref, ProtoOpts#{ + env => #{}, + stream_handlers => StreamHandlers + }), + do_bench_get(?FUNCTION_NAME, "/", #{}, NumClients, 10000, Config), + ranch:set_protocol_options(Ref, ProtoOpts). + +%% Tests: Large body upload. + +plain_h_1M_post_1(Config) -> + doc("Plain HTTP handler body reading; 10K requests per 1 client."), + do_bench_post(?FUNCTION_NAME, "/read_body", #{}, <<0:8_000_000>>, 1, 10000, Config). + +plain_h_1M_post_10(Config) -> + doc("Plain HTTP handler body reading; 10K requests per 10 clients."), + do_bench_post(?FUNCTION_NAME, "/read_body", #{}, <<0:8_000_000>>, 10, 10000, Config). 
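do_stream_h_hello above swaps the listener's stream handlers on the fly through ranch:get_protocol_options/1 and ranch:set_protocol_options/2, then restores the original options. A generic sketch of the same pattern; the function name is illustrative.

-module(swap_stream_handlers_example).
-export([with_stream_handlers/3]).

%% Sketch: temporarily replace the protocol options of a running
%% listener, run Fun(), then restore the original options. Only new
%% connections pick up the changed options; existing connections keep
%% the options they were accepted with.
with_stream_handlers(Ref, StreamHandlers, Fun) ->
    OldOpts = ranch:get_protocol_options(Ref),
    ok = ranch:set_protocol_options(Ref, OldOpts#{
        stream_handlers => StreamHandlers
    }),
    try
        Fun()
    after
        ok = ranch:set_protocol_options(Ref, OldOpts)
    end.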
+ +plain_h_10G_post(Config) -> + doc("Plain HTTP handler body reading; 1 request with a 10GB body."), + do_bench_post_one_large(?FUNCTION_NAME, "/read_body", #{}, 10_000, <<0:8_000_000>>, Config). + +%% Internal. + +do_bench_get(What, Path, Headers, NumClients, NumRuns, Config) -> + Clients = [spawn_link(?MODULE, do_bench_get_proc, + [self(), What, Path, Headers, NumRuns, Config]) + || _ <- lists:seq(1, NumClients)], + {Time, _} = timer:tc(?MODULE, do_bench_wait, [What, Clients]), + do_log("~32s: ~8bµs ~8.1freqs/s", [ + [atom_to_list(config(group, Config)), $., atom_to_list(What)], + Time, + (NumClients * NumRuns) / Time * 1_000_000]), + ok. + +do_bench_get_proc(Parent, What, Path, Headers0, NumRuns, Config) -> + ConnPid = gun_open(Config), + Headers = Headers0#{<<"accept-encoding">> => <<"gzip">>}, + Parent ! {What, ready}, + receive {What, go} -> ok end, + do_bench_get_run(ConnPid, Path, Headers, NumRuns), + Parent ! {What, done}, + gun:close(ConnPid). + +do_bench_get_run(_, _, _, 0) -> + ok; +do_bench_get_run(ConnPid, Path, Headers, Num) -> + Ref = gun:request(ConnPid, <<"GET">>, Path, Headers, <<>>), + {response, IsFin, 200, _RespHeaders} = gun:await(ConnPid, Ref, infinity), + {ok, _} = case IsFin of + nofin -> gun:await_body(ConnPid, Ref, infinity); + fin -> {ok, <<>>} + end, + do_bench_get_run(ConnPid, Path, Headers, Num - 1). + +do_bench_post(What, Path, Headers, Body, NumClients, NumRuns, Config) -> + Clients = [spawn_link(?MODULE, do_bench_post_proc, + [self(), What, Path, Headers, Body, NumRuns, Config]) + || _ <- lists:seq(1, NumClients)], + {Time, _} = timer:tc(?MODULE, do_bench_wait, [What, Clients]), + do_log("~32s: ~8bµs ~8.1freqs/s", [ + [atom_to_list(config(group, Config)), $., atom_to_list(What)], + Time, + (NumClients * NumRuns) / Time * 1_000_000]), + ok. + +do_bench_post_proc(Parent, What, Path, Headers0, Body, NumRuns, Config) -> + ConnPid = gun_open(Config), + Headers = Headers0#{<<"accept-encoding">> => <<"gzip">>}, + Parent ! {What, ready}, + receive {What, go} -> ok end, + do_bench_post_run(ConnPid, Path, Headers, Body, NumRuns), + Parent ! {What, done}, + gun:close(ConnPid). + +do_bench_post_run(_, _, _, _, 0) -> + ok; +do_bench_post_run(ConnPid, Path, Headers, Body, Num) -> + Ref = gun:request(ConnPid, <<"POST">>, Path, Headers, Body), + {response, IsFin, 200, _RespHeaders} = gun:await(ConnPid, Ref, infinity), + {ok, _} = case IsFin of + nofin -> gun:await_body(ConnPid, Ref, infinity); + fin -> {ok, <<>>} + end, + do_bench_post_run(ConnPid, Path, Headers, Body, Num - 1). + +do_bench_post_one_large(What, Path, Headers, NumChunks, BodyChunk, Config) -> + Client = spawn_link(?MODULE, do_bench_post_one_large_proc, + [self(), What, Path, Headers, NumChunks, BodyChunk, Config]), + {Time, _} = timer:tc(?MODULE, do_bench_wait, [What, [Client]]), + do_log("~32s: ~8bµs ~8.1freqs/s", [ + [atom_to_list(config(group, Config)), $., atom_to_list(What)], + Time, + 1 / Time * 1_000_000]), + ok. + +do_bench_post_one_large_proc(Parent, What, Path, Headers0, NumChunks, BodyChunk, Config) -> + ConnPid = gun_open(Config), + Headers = Headers0#{<<"accept-encoding">> => <<"gzip">>}, + Parent ! 
{What, ready}, + receive {What, go} -> ok end, + StreamRef = gun:headers(ConnPid, <<"POST">>, Path, Headers#{ + <<"content-length">> => integer_to_binary(NumChunks * byte_size(BodyChunk)) + }), + do_bench_post_one_large_run(ConnPid, StreamRef, NumChunks - 1, BodyChunk), + {response, IsFin, 200, _RespHeaders} = gun:await(ConnPid, StreamRef, infinity), + {ok, _} = case IsFin of + nofin -> gun:await_body(ConnPid, StreamRef, infinity); + fin -> {ok, <<>>} + end, + Parent ! {What, done}, + gun:close(ConnPid). + +do_bench_post_one_large_run(ConnPid, StreamRef, 0, BodyChunk) -> + gun:data(ConnPid, StreamRef, fin, BodyChunk); +do_bench_post_one_large_run(ConnPid, StreamRef, NumChunks, BodyChunk) -> + gun:data(ConnPid, StreamRef, nofin, BodyChunk), + do_bench_post_one_large_run(ConnPid, StreamRef, NumChunks - 1, BodyChunk). + +do_bench_wait(What, Clients) -> + _ = [receive {What, ready} -> ok end || _ <- Clients], + _ = [ClientPid ! {What, go} || ClientPid <- Clients], + _ = [receive {What, done} -> ok end || _ <- Clients], + ok. + +do_log(Str, Args) -> + ct:log(Str, Args), + io:format(ct_default_gl, Str ++ "~n", Args). diff --git a/test/loop_handler_SUITE.erl b/test/loop_handler_SUITE.erl index a7b5303..71aa801 100644 --- a/test/loop_handler_SUITE.erl +++ b/test/loop_handler_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -32,7 +32,7 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). %% Dispatch configuration. @@ -40,7 +40,10 @@ init_dispatch(_) -> cowboy_router:compile([{'_', [ {"/long_polling", long_polling_h, []}, {"/loop_body", loop_handler_body_h, []}, - {"/loop_timeout", loop_handler_timeout_h, []} + {"/loop_request_timeout", loop_handler_timeout_h, []}, + {"/loop_timeout_init", loop_handler_timeout_init_h, []}, + {"/loop_timeout_info", loop_handler_timeout_info_h, []}, + {"/loop_timeout_hibernate", loop_handler_timeout_hibernate_h, []} ]}]). %% Tests. @@ -79,6 +82,31 @@ long_polling_pipeline(Config) -> request_timeout(Config) -> doc("Ensure that the request_timeout isn't applied when a request is ongoing."), ConnPid = gun_open(Config), - Ref = gun:get(ConnPid, "/loop_timeout", [{<<"accept-encoding">>, <<"gzip">>}]), + Ref = gun:get(ConnPid, "/loop_request_timeout", [{<<"accept-encoding">>, <<"gzip">>}]), {response, nofin, 200, _} = gun:await(ConnPid, Ref, 10000), ok. + +timeout_hibernate(Config) -> + doc("Ensure that loop handler idle timeouts don't trigger after hibernate is returned."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/loop_timeout_hibernate", [{<<"accept-encoding">>, <<"gzip">>}]), + {response, fin, 200, _} = gun:await(ConnPid, Ref), + ok. + +timeout_info(Config) -> + doc("Ensure that loop handler idle timeouts trigger on time when set in info/3."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/loop_timeout_info", [{<<"accept-encoding">>, <<"gzip">>}]), + {response, fin, 299, _} = gun:await(ConnPid, Ref), + ok. 
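The do_bench_* helpers above coordinate N client processes with a simple ready/go/done message protocol and time the whole run with timer:tc. A condensed sketch of that coordination pattern in isolation; module and function names are illustrative.

-module(bench_coordination_example).
-export([run/2, worker/3]).

%% Sketch: spawn NumClients workers, wait until all are ready, release
%% them at once, then measure how long it takes for all of them to
%% finish, mirroring do_bench_wait/2 in the perf suite above.
run(NumClients, WorkFun) ->
    Tag = make_ref(),
    Workers = [spawn_link(?MODULE, worker, [self(), Tag, WorkFun])
        || _ <- lists:seq(1, NumClients)],
    {MicroSecs, ok} = timer:tc(fun() -> wait(Tag, Workers) end),
    MicroSecs.

worker(Parent, Tag, WorkFun) ->
    Parent ! {Tag, self(), ready},
    receive {Tag, go} -> ok end,
    WorkFun(),
    Parent ! {Tag, self(), done}.

wait(Tag, Workers) ->
    _ = [receive {Tag, Pid, ready} -> ok end || Pid <- Workers],
    _ = [Pid ! {Tag, go} || Pid <- Workers],
    _ = [receive {Tag, Pid, done} -> ok end || Pid <- Workers],
    ok.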
+ +timeout_init(Config) -> + doc("Ensure that loop handler idle timeouts trigger on time when set in init/2."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/loop_timeout_init?timeout=1000", + [{<<"accept-encoding">>, <<"gzip">>}]), + {response, fin, 200, _} = gun:await(ConnPid, Ref), + Ref2 = gun:get(ConnPid, "/loop_timeout_init?timeout=100", + [{<<"accept-encoding">>, <<"gzip">>}]), + {response, fin, 299, _} = gun:await(ConnPid, Ref2), + ok. diff --git a/test/metrics_SUITE.erl b/test/metrics_SUITE.erl index 74a259f..784bec1 100644 --- a/test/metrics_SUITE.erl +++ b/test/metrics_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -44,6 +44,8 @@ init_per_group(Name = h2, Config) -> init_per_group(Name = h2c, Config) -> Config1 = cowboy_test:init_http(Name, init_plain_opts(Config), Config), lists:keyreplace(protocol, 1, Config1, {protocol, http2}); +init_per_group(Name = h3, Config) -> + cowboy_test:init_http3(Name, init_plain_opts(Config), Config); init_per_group(Name = http_compress, Config) -> cowboy_test:init_http(Name, init_compress_opts(Config), Config); init_per_group(Name = https_compress, Config) -> @@ -52,10 +54,12 @@ init_per_group(Name = h2_compress, Config) -> cowboy_test:init_http2(Name, init_compress_opts(Config), Config); init_per_group(Name = h2c_compress, Config) -> Config1 = cowboy_test:init_http(Name, init_compress_opts(Config), Config), - lists:keyreplace(protocol, 1, Config1, {protocol, http2}). + lists:keyreplace(protocol, 1, Config1, {protocol, http2}); +init_per_group(Name = h3_compress, Config) -> + cowboy_test:init_http3(Name, init_compress_opts(Config), Config). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). init_plain_opts(Config) -> #{ @@ -157,16 +161,24 @@ do_get(Path, UserData, Config) -> #{ ref := _, pid := From, - streamid := 1, - reason := normal, + streamid := StreamID, + reason := normal, %% @todo Getting h3_no_error here. req := #{}, informational := [], user_data := UserData } = Metrics, + do_check_streamid(StreamID, Config), %% All good! gun:close(ConnPid) end. +do_check_streamid(StreamID, Config) -> + case config(protocol, Config) of + http -> 1 = StreamID; + http2 -> 1 = StreamID; + http3 -> 0 = StreamID + end. + post_body(Config) -> doc("Confirm metrics are correct for a normal POST request."), %% Perform a POST request. @@ -218,12 +230,13 @@ post_body(Config) -> #{ ref := _, pid := From, - streamid := 1, + streamid := StreamID, reason := normal, req := #{}, informational := [], user_data := #{} } = Metrics, + do_check_streamid(StreamID, Config), %% All good! gun:close(ConnPid) end. @@ -273,12 +286,13 @@ no_resp_body(Config) -> #{ ref := _, pid := From, - streamid := 1, + streamid := StreamID, reason := normal, req := #{}, informational := [], user_data := #{} } = Metrics, + do_check_streamid(StreamID, Config), %% All good! gun:close(ConnPid) end. @@ -291,7 +305,8 @@ early_error(Config) -> %% reason in both protocols. 
{Method, Headers, Status, Error} = case config(protocol, Config) of http -> {<<"GET">>, [{<<"host">>, <<"host:port">>}], 400, protocol_error}; - http2 -> {<<"TRACE">>, [], 501, no_error} + http2 -> {<<"TRACE">>, [], 501, no_error}; + http3 -> {<<"TRACE">>, [], 501, h3_no_error} end, Ref = gun:request(ConnPid, Method, "/", [ {<<"accept-encoding">>, <<"gzip">>}, @@ -305,7 +320,7 @@ early_error(Config) -> #{ ref := _, pid := From, - streamid := 1, + streamid := StreamID, reason := {stream_error, Error, _}, partial_req := #{}, resp_status := Status, @@ -313,6 +328,7 @@ early_error(Config) -> early_error_time := _, resp_body_length := 0 } = Metrics, + do_check_streamid(StreamID, Config), ExpectedRespHeaders = maps:from_list(RespHeaders), %% All good! gun:close(ConnPid) @@ -321,7 +337,8 @@ early_error(Config) -> early_error_request_line(Config) -> case config(protocol, Config) of http -> do_early_error_request_line(Config); - http2 -> doc("There are no request lines in HTTP/2.") + http2 -> doc("There are no request lines in HTTP/2."); + http3 -> doc("There are no request lines in HTTP/3.") end. do_early_error_request_line(Config) -> @@ -341,7 +358,7 @@ do_early_error_request_line(Config) -> #{ ref := _, pid := From, - streamid := 1, + streamid := StreamID, reason := {connection_error, protocol_error, _}, partial_req := #{}, resp_status := 400, @@ -349,6 +366,7 @@ do_early_error_request_line(Config) -> early_error_time := _, resp_body_length := 0 } = Metrics, + do_check_streamid(StreamID, Config), ExpectedRespHeaders = maps:from_list(RespHeaders), %% All good! ok @@ -362,7 +380,9 @@ stream_reply(Config) -> ws(Config) -> case config(protocol, Config) of http -> do_ws(Config); - http2 -> doc("It is not currently possible to switch to Websocket over HTTP/2.") + %% @todo The test can be implemented for HTTP/2. + http2 -> doc("It is not currently possible to switch to Websocket over HTTP/2."); + http3 -> {skip, "Gun does not currently support Websocket over HTTP/3."} end. do_ws(Config) -> @@ -405,7 +425,7 @@ do_ws(Config) -> #{ ref := _, pid := From, - streamid := 1, + streamid := StreamID, reason := switch_protocol, req := #{}, %% A 101 upgrade response was sent. @@ -420,6 +440,7 @@ do_ws(Config) -> }], user_data := #{} } = Metrics, + do_check_streamid(StreamID, Config), %% All good! ok end, @@ -438,7 +459,15 @@ error_response(Config) -> {<<"accept-encoding">>, <<"gzip">>}, {<<"x-test-pid">>, pid_to_list(self())} ]), - {response, fin, 500, RespHeaders} = gun:await(ConnPid, Ref, infinity), + Protocol = config(protocol, Config), + RespHeaders = case gun:await(ConnPid, Ref, infinity) of + {response, fin, 500, RespHeaders0} -> + RespHeaders0; + %% The RST_STREAM arrived before the start of the response. + %% See maybe_h3_error comment for details. + {error, {stream_error, {stream_error, h3_internal_error, _}}} when Protocol =:= http3 -> + unknown + end, timer:sleep(100), %% Receive the metrics and validate them. receive @@ -463,25 +492,33 @@ error_response(Config) -> resp_headers := ExpectedRespHeaders, resp_body_length := 0 } = Metrics, - ExpectedRespHeaders = maps:from_list(RespHeaders), + case RespHeaders of + %% The HTTP/3 stream has reset too early so we can't + %% verify the response headers. + unknown -> + ok; + _ -> + ExpectedRespHeaders = maps:from_list(RespHeaders) + end, %% The request process executed normally. 
#{procs := Procs} = Metrics, [{_, #{ spawn := ProcSpawn, exit := ProcExit, - reason := {crash, _StackTrace} + reason := {crash, StackTrace} }}] = maps:to_list(Procs), true = ProcSpawn =< ProcExit, %% Confirm other metadata are as expected. #{ ref := _, pid := From, - streamid := 1, - reason := {internal_error, {'EXIT', _Pid, {crash, _StackTrace}}, 'Stream process crashed.'}, + streamid := StreamID, + reason := {internal_error, {'EXIT', _Pid, {crash, StackTrace}}, 'Stream process crashed.'}, req := #{}, informational := [], user_data := #{} } = Metrics, + do_check_streamid(StreamID, Config), %% All good! gun:close(ConnPid) end. @@ -495,7 +532,15 @@ error_response_after_reply(Config) -> {<<"accept-encoding">>, <<"gzip">>}, {<<"x-test-pid">>, pid_to_list(self())} ]), - {response, fin, 200, RespHeaders} = gun:await(ConnPid, Ref, infinity), + Protocol = config(protocol, Config), + RespHeaders = case gun:await(ConnPid, Ref, infinity) of + {response, fin, 200, RespHeaders0} -> + RespHeaders0; + %% The RST_STREAM arrived before the start of the response. + %% See maybe_h3_error comment for details. + {error, {stream_error, {stream_error, h3_internal_error, _}}} when Protocol =:= http3 -> + unknown + end, timer:sleep(100), %% Receive the metrics and validate them. receive @@ -520,25 +565,33 @@ error_response_after_reply(Config) -> resp_headers := ExpectedRespHeaders, resp_body_length := 0 } = Metrics, - ExpectedRespHeaders = maps:from_list(RespHeaders), + case RespHeaders of + %% The HTTP/3 stream has reset too early so we can't + %% verify the response headers. + unknown -> + ok; + _ -> + ExpectedRespHeaders = maps:from_list(RespHeaders) + end, %% The request process executed normally. #{procs := Procs} = Metrics, [{_, #{ spawn := ProcSpawn, exit := ProcExit, - reason := {crash, _StackTrace} + reason := {crash, StackTrace} }}] = maps:to_list(Procs), true = ProcSpawn =< ProcExit, %% Confirm other metadata are as expected. #{ ref := _, pid := From, - streamid := 1, - reason := {internal_error, {'EXIT', _Pid, {crash, _StackTrace}}, 'Stream process crashed.'}, + streamid := StreamID, + reason := {internal_error, {'EXIT', _Pid, {crash, StackTrace}}, 'Stream process crashed.'}, req := #{}, informational := [], user_data := #{} } = Metrics, + do_check_streamid(StreamID, Config), %% All good! gun:close(ConnPid) end. diff --git a/test/misc_SUITE.erl b/test/misc_SUITE.erl index 6245636..e834156 100644 --- a/test/misc_SUITE.erl +++ b/test/misc_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -21,29 +21,29 @@ -import(cowboy_test, [gun_open/1]). all() -> - [{group, app}, {group, set_env}|cowboy_test:common_all()]. + [{group, app}, {group, env}|cowboy_test:common_all()]. groups() -> Common = ct_helper:all(?MODULE) - -- [restart_gracefully, set_env, set_env_missing], + -- [restart_gracefully, get_env, set_env, set_env_missing], [ {app, [], [restart_gracefully]}, - {set_env, [parallel], [set_env, set_env_missing]} + {env, [parallel], [get_env, set_env, set_env_missing]} |cowboy_test:common_groups(Common)]. 
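For context on the metrics_SUITE hunks above: the suite now also runs over HTTP/3, whose first client-initiated request stream has ID 0 rather than 1, hence do_check_streamid. The metrics themselves are produced by the cowboy_metrics_h stream handler. A hedged sketch of enabling it on a listener; the callback body, listener name and port are illustrative assumptions.

-module(metrics_listener_example).
-export([start/0]).

%% Sketch: enable cowboy_metrics_h so a metrics map is delivered to the
%% configured callback when each stream terminates. This callback just
%% prints the stream id and termination reason.
start() ->
    Dispatch = cowboy_router:compile([
        {'_', [{"/", hello_h, []}]}
    ]),
    {ok, _} = cowboy:start_clear(metrics_listener_example, [{port, 8080}], #{
        env => #{dispatch => Dispatch},
        metrics_callback => fun(Metrics) ->
            #{streamid := StreamID, reason := Reason} = Metrics,
            io:format("stream ~p terminated: ~p~n", [StreamID, Reason])
        end,
        stream_handlers => [cowboy_metrics_h, cowboy_stream_h]
    }),
    ok.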
init_per_group(Name=app, Config) -> cowboy_test:init_http(Name, #{ env => #{dispatch => init_dispatch(Config)} }, Config); -init_per_group(set_env, Config) -> +init_per_group(env, Config) -> Config; init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). -end_per_group(set_env, _) -> +end_per_group(env, _) -> ok; end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). init_dispatch(_) -> cowboy_router:compile([{"localhost", [ @@ -84,6 +84,26 @@ router_invalid_path(Config) -> {response, _, 400, _} = gun:await(ConnPid, Ref), ok. +get_env(Config0) -> + doc("Ensure we can retrieve middleware environment values."), + Dispatch = init_dispatch(Config0), + _Config = cowboy_test:init_http(?FUNCTION_NAME, #{ + env => #{ + dispatch => Dispatch, + the_key => the_value + } + }, Config0), + try + Dispatch = cowboy:get_env(?FUNCTION_NAME, dispatch), + Dispatch = cowboy:get_env(?FUNCTION_NAME, dispatch, the_default), + the_value = cowboy:get_env(?FUNCTION_NAME, the_key), + the_value = cowboy:get_env(?FUNCTION_NAME, the_key, the_default), + {'EXIT', _} = (catch cowboy:get_env(?FUNCTION_NAME, missing_key)), + the_default = cowboy:get_env(?FUNCTION_NAME, missing_key, the_default) + after + cowboy:stop_listener(?FUNCTION_NAME) + end. + set_env(Config0) -> doc("Live replace a middleware environment value."), Config = cowboy_test:init_http(?FUNCTION_NAME, #{ diff --git a/test/plain_handler_SUITE.erl b/test/plain_handler_SUITE.erl index e980d5b..7684e6b 100644 --- a/test/plain_handler_SUITE.erl +++ b/test/plain_handler_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -39,12 +39,13 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). %% Routes. init_dispatch(_) -> cowboy_router:compile([{"localhost", [ + {"/crash/external_exit", crash_h, external_exit}, {"/crash/no_reply", crash_h, no_reply}, {"/crash/reply", crash_h, reply} ]}]). @@ -58,8 +59,15 @@ crash_after_reply(Config) -> Ref = gun:get(ConnPid, "/crash/reply", [ {<<"accept-encoding">>, <<"gzip">>} ]), - {response, fin, 200, _} = gun:await(ConnPid, Ref), - {error, timeout} = gun:await(ConnPid, Ref, 1000), + Protocol = config(protocol, Config), + _ = case gun:await(ConnPid, Ref) of + {response, fin, 200, _} -> + {error, timeout} = gun:await(ConnPid, Ref, 1000); + %% See maybe_h3_error comment for details. + {error, {stream_error, {stream_error, h3_internal_error, _}}} + when Protocol =:= http3 -> + ok + end, gun:close(ConnPid). crash_before_reply(Config) -> @@ -71,3 +79,13 @@ crash_before_reply(Config) -> ]), {response, fin, 500, _} = gun:await(ConnPid, Ref), gun:close(ConnPid). + +external_exit_before_reply(Config) -> + doc("A plain handler exits externally before a response was sent " + "results in a 500 response."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/crash/external_exit", [ + {<<"accept-encoding">>, <<"gzip">>} + ]), + {response, fin, 500, _} = gun:await(ConnPid, Ref), + gun:close(ConnPid). 
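The new get_env test above exercises cowboy:get_env/2,3, which reads a value back from a running listener's middleware environment (the counterpart to cowboy:set_env/3). A short usage sketch; the listener name, port and keys are illustrative.

-module(get_env_example).
-export([demo/0]).

%% Sketch: read values back from a listener's middleware environment.
%% get_env/2 crashes if the key is missing; get_env/3 returns a default.
demo() ->
    Dispatch = cowboy_router:compile([
        {'_', [{"/", hello_h, []}]}
    ]),
    {ok, _} = cowboy:start_clear(get_env_example, [{port, 8080}], #{
        env => #{dispatch => Dispatch, the_key => the_value}
    }),
    Dispatch = cowboy:get_env(get_env_example, dispatch),
    the_value = cowboy:get_env(get_env_example, the_key),
    the_default = cowboy:get_env(get_env_example, missing_key, the_default),
    ok = cowboy:stop_listener(get_env_example).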
diff --git a/test/proxy_header_SUITE.erl b/test/proxy_header_SUITE.erl index be6ab04..c8f63a3 100644 --- a/test/proxy_header_SUITE.erl +++ b/test/proxy_header_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -71,6 +71,30 @@ init_dispatch() -> %% Tests. +fail_gracefully_on_disconnect(Config) -> + doc("Probing a port must not generate a crash"), + {ok, Socket} = gen_tcp:connect("localhost", config(port, Config), + [binary, {active, false}, {packet, raw}]), + timer:sleep(50), + Pid = case config(type, Config) of + tcp -> ct_helper:get_remote_pid_tcp(Socket); + %% We connect to a TLS port using a TCP socket so we need + %% to first obtain the remote pid of the TCP socket, which + %% is a TLS socket on the server, and then get the real + %% remote pid from its state. + ssl -> ct_helper:get_remote_pid_tls_state(ct_helper:get_remote_pid_tcp(Socket)) + end, + Ref = erlang:monitor(process, Pid), + gen_tcp:close(Socket), + receive + {'DOWN', Ref, process, Pid, {shutdown, closed}} -> + ok; + {'DOWN', Ref, process, Pid, Reason} -> + error(Reason) + after 500 -> + error(timeout) + end. + v1_proxy_header(Config) -> doc("Confirm we can read the proxy header at the start of the connection."), ProxyInfo = #{ @@ -126,7 +150,8 @@ do_proxy_header_https(Config, ProxyInfo) -> {ok, Socket0} = gen_tcp:connect("localhost", config(port, Config), [binary, {active, false}, {packet, raw}]), ok = gen_tcp:send(Socket0, ranch_proxy_header:header(ProxyInfo)), - {ok, Socket} = ssl:connect(Socket0, [], 1000), + TlsOpts = ct_helper:get_certs_from_ets(), + {ok, Socket} = ssl:connect(Socket0, TlsOpts, 1000), do_proxy_header_http_common({raw_client, Socket, ssl}, ProxyInfo). do_proxy_header_http_common(Client, ProxyInfo) -> @@ -151,7 +176,9 @@ do_proxy_header_h2(Config, ProxyInfo) -> {ok, Socket0} = gen_tcp:connect("localhost", config(port, Config), [binary, {active, false}, {packet, raw}]), ok = gen_tcp:send(Socket0, ranch_proxy_header:header(ProxyInfo)), - {ok, Socket} = ssl:connect(Socket0, [{alpn_advertised_protocols, [<<"h2">>]}], 1000), + TlsOpts = ct_helper:get_certs_from_ets(), + {ok, Socket} = ssl:connect(Socket0, + [{alpn_advertised_protocols, [<<"h2">>]}|TlsOpts], 1000), do_proxy_header_h2_common({raw_client, Socket, ssl}, ProxyInfo). do_proxy_header_h2c(Config, ProxyInfo) -> diff --git a/test/req_SUITE.erl b/test/req_SUITE.erl index 352b2a0..9adc6e4 100644 --- a/test/req_SUITE.erl +++ b/test/req_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2016-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -46,7 +46,7 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). %% Routes. 
@@ -57,13 +57,16 @@ init_dispatch(Config) -> {"/resp/:key[/:arg]", resp_h, []}, {"/multipart[/:key]", multipart_h, []}, {"/args/:key/:arg[/:default]", echo_h, []}, - {"/crash/:key/period", echo_h, #{length => 999999999, period => 1000, crash => true}}, + {"/crash/:key/period", echo_h, + #{length => 999999999, period => 1000, timeout => 5000, crash => true}}, {"/no-opts/:key", echo_h, #{crash => true}}, {"/opts/:key/length", echo_h, #{length => 1000}}, {"/opts/:key/period", echo_h, #{length => 999999999, period => 2000}}, {"/opts/:key/timeout", echo_h, #{timeout => 1000, crash => true}}, {"/100-continue/:key", echo_h, []}, {"/full/:key", echo_h, []}, + {"/auto-sync/:key", echo_h, []}, + {"/auto-async/:key", echo_h, []}, {"/spawn/:key", echo_h, []}, {"/no/:key", echo_h, []}, {"/direct/:key/[...]", echo_h, []}, @@ -104,13 +107,17 @@ do_get(Path, Config) -> do_get(Path, Headers, Config) -> ConnPid = gun_open(Config), Ref = gun:get(ConnPid, Path, [{<<"accept-encoding">>, <<"gzip">>}|Headers]), - {response, IsFin, Status, RespHeaders} = gun:await(ConnPid, Ref, infinity), - {ok, RespBody} = case IsFin of - nofin -> gun:await_body(ConnPid, Ref, infinity); - fin -> {ok, <<>>} - end, - gun:close(ConnPid), - {Status, RespHeaders, do_decode(RespHeaders, RespBody)}. + case gun:await(ConnPid, Ref, infinity) of + {response, IsFin, Status, RespHeaders} -> + {ok, RespBody} = case IsFin of + nofin -> gun:await_body(ConnPid, Ref, infinity); + fin -> {ok, <<>>} + end, + gun:close(ConnPid), + {Status, RespHeaders, do_decode(RespHeaders, RespBody)}; + {error, {stream_error, Error}} -> + Error + end. do_get_body(Path, Config) -> do_get_body(Path, [], Config). @@ -139,7 +146,9 @@ do_get_inform(Path, Config) -> fin -> {ok, <<>>} end, gun:close(ConnPid), - {InfoStatus, InfoHeaders, RespStatus, RespHeaders, do_decode(RespHeaders, RespBody)} + {InfoStatus, InfoHeaders, RespStatus, RespHeaders, do_decode(RespHeaders, RespBody)}; + {error, {stream_error, Error}} -> + Error end. do_decode(Headers, Body) -> @@ -181,25 +190,20 @@ bindings(Config) -> cert(Config) -> case config(type, Config) of tcp -> doc("TLS certificates can only be provided over TLS."); - ssl -> do_cert(Config) + ssl -> do_cert(Config); + quic -> do_cert(Config) end. -do_cert(Config0) -> +do_cert(Config) -> doc("A client TLS certificate was provided."), - {CaCert, Cert, Key} = ct_helper:make_certs(), - Config = [{tls_opts, [ - {cert, Cert}, - {key, Key}, - {cacerts, [CaCert]} - ]}|Config0], Cert = do_get_body("/cert", Config), Cert = do_get_body("/direct/cert", Config), ok. cert_undefined(Config) -> doc("No client TLS certificate was provided."), - <<"undefined">> = do_get_body("/cert", Config), - <<"undefined">> = do_get_body("/direct/cert", Config), + <<"undefined">> = do_get_body("/cert", [{no_cert, true}|Config]), + <<"undefined">> = do_get_body("/direct/cert", [{no_cert, true}|Config]), ok. 
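The cert tests above read the client TLS certificate from the request; over plain TCP there is no certificate and the suite expects <<"undefined">>. A hedged handler sketch using cowboy_req:cert/1, with an assumed module name (the suite's own echo handler works differently).

-module(cert_echo_h).
-behaviour(cowboy_handler).
-export([init/2]).

%% Sketch: return the DER-encoded client certificate, or the string
%% "undefined" when the client presented none or the transport is
%% plain TCP, similar to what the /cert tests above check.
init(Req0, State) ->
    Body = case cowboy_req:cert(Req0) of
        undefined -> <<"undefined">>;
        Cert -> Cert
    end,
    Req = cowboy_req:reply(200, #{
        <<"content-type">> => <<"application/octet-stream">>
    }, Body, Req0),
    {ok, Req, State}.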
header(Config) -> @@ -239,8 +243,10 @@ match_cookies(Config) -> <<"#{}">> = do_get_body("/match/cookies", [{<<"cookie">>, "a=b; c=d"}], Config), <<"#{a => <<\"b\">>}">> = do_get_body("/match/cookies/a", [{<<"cookie">>, "a=b; c=d"}], Config), <<"#{c => <<\"d\">>}">> = do_get_body("/match/cookies/c", [{<<"cookie">>, "a=b; c=d"}], Config), - <<"#{a => <<\"b\">>,c => <<\"d\">>}">> = do_get_body("/match/cookies/a/c", - [{<<"cookie">>, "a=b; c=d"}], Config), + case do_get_body("/match/cookies/a/c", [{<<"cookie">>, "a=b; c=d"}], Config) of + <<"#{a => <<\"b\">>,c => <<\"d\">>}">> -> ok; + <<"#{c => <<\"d\">>,a => <<\"b\">>}">> -> ok + end, %% Ensure match errors result in a 400 response. {400, _, _} = do_get("/match/cookies/a/c", [{<<"cookie">>, "a=b"}], Config), @@ -253,11 +259,21 @@ match_qs(Config) -> <<"#{}">> = do_get_body("/match/qs?a=b&c=d", Config), <<"#{a => <<\"b\">>}">> = do_get_body("/match/qs/a?a=b&c=d", Config), <<"#{c => <<\"d\">>}">> = do_get_body("/match/qs/c?a=b&c=d", Config), - <<"#{a => <<\"b\">>,c => <<\"d\">>}">> = do_get_body("/match/qs/a/c?a=b&c=d", Config), - <<"#{a => <<\"b\">>,c => true}">> = do_get_body("/match/qs/a/c?a=b&c", Config), - <<"#{a => true,c => <<\"d\">>}">> = do_get_body("/match/qs/a/c?a&c=d", Config), + case do_get_body("/match/qs/a/c?a=b&c=d", Config) of + <<"#{a => <<\"b\">>,c => <<\"d\">>}">> -> ok; + <<"#{c => <<\"d\">>,a => <<\"b\">>}">> -> ok + end, + case do_get_body("/match/qs/a/c?a=b&c", Config) of + <<"#{a => <<\"b\">>,c => true}">> -> ok; + <<"#{c => true,a => <<\"b\">>}">> -> ok + end, + case do_get_body("/match/qs/a/c?a&c=d", Config) of + <<"#{a => true,c => <<\"d\">>}">> -> ok; + <<"#{c => <<\"d\">>,a => true}">> -> ok + end, %% Ensure match errors result in a 400 response. {400, _, _} = do_get("/match/qs/a/c?a=b", [], Config), + {400, _, _} = do_get("/match/qs_with_constraints", [], Config), %% This function is tested more extensively through unit tests. ok. @@ -308,7 +324,7 @@ filter_then_parse_cookies(Config) -> [{<<"cookie">>, "bad name=strawberry"}], Config), <<"[{<<\"cake\">>,<<\"strawberry\">>}]">> = do_get_body("/filter_then_parse_cookies", - [{<<"cookie">>, "bad name=strawberry; cake=strawberry"}], Config), + [{<<"cookie">>, "bad name=strawberry; another bad name=strawberry; cake=strawberry"}], Config), <<"[]">> = do_get_body("/filter_then_parse_cookies", [{<<"cookie">>, "Blocked by http://www.example.com/upgrade-to-remove"}], Config), @@ -377,7 +393,8 @@ port(Config) -> Port = do_get_body("/direct/port", Config), ExpectedPort = case config(type, Config) of tcp -> <<"80">>; - ssl -> <<"443">> + ssl -> <<"443">>; + quic -> <<"443">> end, ExpectedPort = do_get_body("/port", [{<<"host">>, <<"localhost">>}], Config), ExpectedPort = do_get_body("/direct/port", [{<<"host">>, <<"localhost">>}], Config), @@ -403,7 +420,8 @@ do_scheme(Path, Config) -> Transport = config(type, Config), case do_get_body(Path, Config) of <<"http">> when Transport =:= tcp -> ok; - <<"https">> when Transport =:= ssl -> ok + <<"https">> when Transport =:= ssl -> ok; + <<"https">> when Transport =:= quic -> ok end. 
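The match_qs and match_cookies assertions above were loosened to accept either key order in the printed map, since the textual key order of a formatted map is not something the tests can rely on across OTP releases. For reference, a hedged handler sketch of the underlying API, cowboy_req:match_qs/2; field names, constraints and default are illustrative. A match failure is reported as a 400 response, as the tests above assert.

-module(match_qs_example_h).
-behaviour(cowboy_handler).
-export([init/2]).

%% Sketch: extract and validate query string parameters. "id" must be
%% an integer; "lang" is optional and defaults to <<"en">>.
init(Req0, State) ->
    #{id := Id, lang := Lang} = cowboy_req:match_qs([
        {id, int},
        {lang, [], <<"en">>}
    ], Req0),
    Body = io_lib:format("id=~b lang=~s", [Id, Lang]),
    Req = cowboy_req:reply(200, #{
        <<"content-type">> => <<"text/plain">>
    }, Body, Req0),
    {ok, Req, State}.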
sock(Config) -> @@ -416,7 +434,8 @@ uri(Config) -> doc("Request URI building/modification."), Scheme = case config(type, Config) of tcp -> <<"http">>; - ssl -> <<"https">> + ssl -> <<"https">>; + quic -> <<"https">> end, SLen = byte_size(Scheme), Port = integer_to_binary(config(port, Config)), @@ -450,7 +469,8 @@ do_version(Path, Config) -> Protocol = config(protocol, Config), case do_get_body(Path, Config) of <<"HTTP/1.1">> when Protocol =:= http -> ok; - <<"HTTP/2">> when Protocol =:= http2 -> ok + <<"HTTP/2">> when Protocol =:= http2 -> ok; + <<"HTTP/3">> when Protocol =:= http3 -> ok end. %% Tests: Request body. @@ -504,11 +524,19 @@ read_body_period(Config) -> %% for 2 seconds. The test succeeds if we get some of the data back %% (meaning the function will have returned after the period ends). gun:data(ConnPid, Ref, nofin, Body), - {response, nofin, 200, _} = gun:await(ConnPid, Ref, infinity), - {data, _, Data} = gun:await(ConnPid, Ref, infinity), - %% We expect to read at least some data. - true = Data =/= <<>>, - gun:close(ConnPid). + Response = gun:await(ConnPid, Ref, infinity), + case Response of + {response, nofin, 200, _} -> + {data, _, Data} = gun:await(ConnPid, Ref, infinity), + %% We expect to read at least some data. + true = Data =/= <<>>, + gun:close(ConnPid); + %% We got a crash, likely because the environment + %% was overloaded and the timeout triggered. Try again. + {response, _, 500, _} -> + gun:close(ConnPid), + read_body_period(Config) + end. %% We expect a crash. do_read_body_timeout(Path, Body, Config) -> @@ -516,9 +544,21 @@ do_read_body_timeout(Path, Body, Config) -> Ref = gun:headers(ConnPid, "POST", Path, [ {<<"content-length">>, integer_to_binary(byte_size(Body))} ]), - {response, _, 500, _} = gun:await(ConnPid, Ref, infinity), + case gun:await(ConnPid, Ref, infinity) of + {response, _, 500, _} -> + ok; + %% See do_maybe_h3_error comment for details. + {error, {stream_error, {stream_error, h3_internal_error, _}}} -> + ok + end, gun:close(ConnPid). +read_body_auto(Config) -> + doc("Read the request body using auto mode."), + <<0:80000000>> = do_body("POST", "/auto-sync/read_body", [], <<0:80000000>>, Config), + <<0:80000000>> = do_body("POST", "/auto-async/read_body", [], <<0:80000000>>, Config), + ok. + read_body_spawn(Config) -> doc("Confirm we can use cowboy_req:read_body/1,2 from another process."), <<"hello world!">> = do_body("POST", "/spawn/read_body", [], "hello world!", Config), @@ -549,7 +589,8 @@ do_read_body_expect_100_continue(Path, Config) -> fin -> {ok, <<>>} end, gun:close(ConnPid), - do_decode(RespHeaders, RespBody). + do_decode(RespHeaders, RespBody), + ok. read_urlencoded_body(Config) -> doc("application/x-www-form-urlencoded request body."), @@ -576,8 +617,20 @@ do_read_urlencoded_body_too_large(Path, Body, Config) -> {<<"content-length">>, integer_to_binary(iolist_size(Body))} ]), gun:data(ConnPid, Ref, fin, Body), - {response, _, 413, _} = gun:await(ConnPid, Ref, infinity), - gun:close(ConnPid). + Response = gun:await(ConnPid, Ref, infinity), + gun:close(ConnPid), + case Response of + {response, _, 413, _} -> + ok; + %% We got the wrong crash, likely because the environment + %% was overloaded and the timeout triggered. Try again. + {response, _, 408, _} -> + do_read_urlencoded_body_too_large(Path, Body, Config); + %% Timing issues make it possible for the connection to be + %% closed before the data went through. We retry. 
+ {error, {stream_error, {closed, {error,closed}}}} -> + do_read_urlencoded_body_too_large(Path, Body, Config) + end. read_urlencoded_body_too_long(Config) -> doc("application/x-www-form-urlencoded request body sent too slow. " @@ -592,25 +645,37 @@ do_read_urlencoded_body_too_long(Path, Body, Config) -> {<<"content-length">>, integer_to_binary(byte_size(Body) * 2)} ]), gun:data(ConnPid, Ref, nofin, Body), - {response, _, 408, RespHeaders} = gun:await(ConnPid, Ref, infinity), - _ = case config(protocol, Config) of - http -> + Protocol = config(protocol, Config), + case gun:await(ConnPid, Ref, infinity) of + {response, _, 408, RespHeaders} when Protocol =:= http -> %% 408 error responses should close HTTP/1.1 connections. - {_, <<"close">>} = lists:keyfind(<<"connection">>, 1, RespHeaders); - http2 -> - ok - end, - gun:close(ConnPid). + {_, <<"close">>} = lists:keyfind(<<"connection">>, 1, RespHeaders), + gun:close(ConnPid); + {response, _, 408, _} when Protocol =:= http2; Protocol =:= http3 -> + gun:close(ConnPid); + %% We must have hit the timeout due to busy CI environment. Retry. + {response, _, 500, _} -> + gun:close(ConnPid), + do_read_urlencoded_body_too_long(Path, Body, Config) + end. read_and_match_urlencoded_body(Config) -> doc("Read and match an application/x-www-form-urlencoded request body."), <<"#{}">> = do_body("POST", "/match/body_qs", [], "a=b&c=d", Config), <<"#{a => <<\"b\">>}">> = do_body("POST", "/match/body_qs/a", [], "a=b&c=d", Config), <<"#{c => <<\"d\">>}">> = do_body("POST", "/match/body_qs/c", [], "a=b&c=d", Config), - <<"#{a => <<\"b\">>,c => <<\"d\">>}">> - = do_body("POST", "/match/body_qs/a/c", [], "a=b&c=d", Config), - <<"#{a => <<\"b\">>,c => true}">> = do_body("POST", "/match/body_qs/a/c", [], "a=b&c", Config), - <<"#{a => true,c => <<\"d\">>}">> = do_body("POST", "/match/body_qs/a/c", [], "a&c=d", Config), + case do_body("POST", "/match/body_qs/a/c", [], "a=b&c=d", Config) of + <<"#{a => <<\"b\">>,c => <<\"d\">>}">> -> ok; + <<"#{c => <<\"d\">>,a => <<\"b\">>}">> -> ok + end, + case do_body("POST", "/match/body_qs/a/c", [], "a=b&c", Config) of + <<"#{a => <<\"b\">>,c => true}">> -> ok; + <<"#{c => true,a => <<\"b\">>}">> -> ok + end, + case do_body("POST", "/match/body_qs/a/c", [], "a&c=d", Config) of + <<"#{a => true,c => <<\"d\">>}">> -> ok; + <<"#{c => <<\"d\">>,a => true}">> -> ok + end, %% Ensure match errors result in a 400 response. {400, _} = do_body_error("POST", "/match/body_qs/a/c", [], "a=b", Config), %% Ensure parse errors result in a 400 response. @@ -768,18 +833,18 @@ set_resp_cookie(Config) -> doc("Response using set_resp_cookie."), %% Single cookie, no options. {200, Headers1, _} = do_get("/resp/set_resp_cookie3", Config), - {_, <<"mycookie=myvalue; Version=1">>} + {_, <<"mycookie=myvalue">>} = lists:keyfind(<<"set-cookie">>, 1, Headers1), %% Single cookie, with options. {200, Headers2, _} = do_get("/resp/set_resp_cookie4", Config), - {_, <<"mycookie=myvalue; Version=1; Path=/resp/set_resp_cookie4">>} + {_, <<"mycookie=myvalue; Path=/resp/set_resp_cookie4">>} = lists:keyfind(<<"set-cookie">>, 1, Headers2), %% Multiple cookies. {200, Headers3, _} = do_get("/resp/set_resp_cookie3/multiple", Config), [_, _] = [H || H={<<"set-cookie">>, _} <- Headers3], %% Overwrite previously set cookie. {200, Headers4, _} = do_get("/resp/set_resp_cookie3/overwrite", Config), - {_, <<"mycookie=overwrite; Version=1">>} + {_, <<"mycookie=overwrite">>} = lists:keyfind(<<"set-cookie">>, 1, Headers4), ok. 
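[Editor's note] The expected set-cookie values above lose their "Version=1" suffix; the cookies themselves are still produced with cowboy_req:set_resp_cookie/3,4 on the handler side. A minimal sketch of calls that would yield the first two asserted header values (function names are illustrative, not the suite's actual handler):

%% Produces "mycookie=myvalue" in the response's set-cookie header.
plain_cookie(Req) ->
    cowboy_req:set_resp_cookie(<<"mycookie">>, <<"myvalue">>, Req).

%% Produces "mycookie=myvalue; Path=/resp/set_resp_cookie4".
cookie_with_path(Req) ->
    cowboy_req:set_resp_cookie(<<"mycookie">>, <<"myvalue">>, Req,
        #{path => <<"/resp/set_resp_cookie4">>}).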
@@ -787,13 +852,22 @@ set_resp_header(Config) -> doc("Response using set_resp_header."), {200, Headers, <<"OK">>} = do_get("/resp/set_resp_header", Config), true = lists:keymember(<<"content-type">>, 1, Headers), + %% The set-cookie header is special. set_resp_cookie must be used. + {500, _, _} = do_maybe_h3_error3(do_get("/resp/set_resp_header_cookie", Config)), ok. set_resp_headers(Config) -> doc("Response using set_resp_headers."), - {200, Headers, <<"OK">>} = do_get("/resp/set_resp_headers", Config), - true = lists:keymember(<<"content-type">>, 1, Headers), - true = lists:keymember(<<"content-encoding">>, 1, Headers), + {200, Headers1, <<"OK">>} = do_get("/resp/set_resp_headers", Config), + true = lists:keymember(<<"content-type">>, 1, Headers1), + true = lists:keymember(<<"content-encoding">>, 1, Headers1), + {200, Headers2, <<"OK">>} = do_get("/resp/set_resp_headers_list", Config), + true = lists:keymember(<<"content-type">>, 1, Headers2), + true = lists:keymember(<<"content-encoding">>, 1, Headers2), + {_, <<"one, two">>} = lists:keyfind(<<"test-header">>, 1, Headers2), + %% The set-cookie header is special. set_resp_cookie must be used. + {500, _, _} = do_maybe_h3_error3(do_get("/resp/set_resp_headers_cookie", Config)), + {500, _, _} = do_maybe_h3_error3(do_get("/resp/set_resp_headers_list_cookie", Config)), ok. resp_header(Config) -> @@ -855,22 +929,52 @@ delete_resp_header(Config) -> false = lists:keymember(<<"content-type">>, 1, Headers), ok. +%% Data may be lost due to how RESET_STREAM QUIC frame works. +%% Because there is ongoing work for a better way to reset streams +%% (https://www.ietf.org/archive/id/draft-ietf-quic-reliable-stream-reset-03.html) +%% we convert the error to a 500 to keep the tests more explicit +%% at what we expect. +%% @todo When RESET_STREAM_AT gets added we can remove this function. +do_maybe_h3_error2({stream_error, h3_internal_error, _}) -> {500, []}; +do_maybe_h3_error2(Result) -> Result. + +do_maybe_h3_error3({stream_error, h3_internal_error, _}) -> {500, [], <<>>}; +do_maybe_h3_error3(Result) -> Result. + inform2(Config) -> doc("Informational response(s) without headers, followed by the real response."), {102, [], 200, _, _} = do_get_inform("/resp/inform2/102", Config), {102, [], 200, _, _} = do_get_inform("/resp/inform2/binary", Config), - {500, _} = do_get_inform("/resp/inform2/error", Config), + {500, _} = do_maybe_h3_error2(do_get_inform("/resp/inform2/error", Config)), {102, [], 200, _, _} = do_get_inform("/resp/inform2/twice", Config), - ok. + %% With HTTP/1.1 and HTTP/2 we will not get an error. + %% With HTTP/3 however the stream will occasionally + %% be reset before Gun receives the response. + case do_get_inform("/resp/inform2/after_reply", Config) of + {200, _} -> + ok; + {stream_error, h3_internal_error, _} -> + ok + end. inform3(Config) -> doc("Informational response(s) with headers, followed by the real response."), Headers = [{<<"ext-header">>, <<"ext-value">>}], {102, Headers, 200, _, _} = do_get_inform("/resp/inform3/102", Config), {102, Headers, 200, _, _} = do_get_inform("/resp/inform3/binary", Config), - {500, _} = do_get_inform("/resp/inform3/error", Config), + {500, _} = do_maybe_h3_error2(do_get_inform("/resp/inform3/error", Config)), + %% The set-cookie header is special. set_resp_cookie must be used. + {500, _} = do_maybe_h3_error2(do_get_inform("/resp/inform3/set_cookie", Config)), {102, Headers, 200, _, _} = do_get_inform("/resp/inform3/twice", Config), - ok. + %% With HTTP/1.1 and HTTP/2 we will not get an error. 
+ %% With HTTP/3 however the stream will occasionally + %% be reset before Gun receives the response. + case do_get_inform("/resp/inform3/after_reply", Config) of + {200, _} -> + ok; + {stream_error, h3_internal_error, _} -> + ok + end. reply2(Config) -> doc("Response with default headers and no body."), @@ -878,9 +982,8 @@ reply2(Config) -> {201, _, _} = do_get("/resp/reply2/201", Config), {404, _, _} = do_get("/resp/reply2/404", Config), {200, _, _} = do_get("/resp/reply2/binary", Config), - {500, _, _} = do_get("/resp/reply2/error", Config), - %% @todo We want to crash when reply or stream_reply is called twice. - %% How to test this properly? This isn't enough. + {500, _, _} = do_maybe_h3_error3(do_get("/resp/reply2/error", Config)), + %% @todo How to test this properly? This isn't enough. {200, _, _} = do_get("/resp/reply2/twice", Config), ok. @@ -892,7 +995,9 @@ reply3(Config) -> true = lists:keymember(<<"content-type">>, 1, Headers2), {404, Headers3, _} = do_get("/resp/reply3/404", Config), true = lists:keymember(<<"content-type">>, 1, Headers3), - {500, _, _} = do_get("/resp/reply3/error", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/resp/reply3/error", Config)), + %% The set-cookie header is special. set_resp_cookie must be used. + {500, _, _} = do_maybe_h3_error3(do_get("/resp/reply3/set_cookie", Config)), ok. reply4(Config) -> @@ -900,11 +1005,11 @@ reply4(Config) -> {200, _, <<"OK">>} = do_get("/resp/reply4/200", Config), {201, _, <<"OK">>} = do_get("/resp/reply4/201", Config), {404, _, <<"OK">>} = do_get("/resp/reply4/404", Config), - {500, _, _} = do_get("/resp/reply4/error", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/resp/reply4/error", Config)), + %% The set-cookie header is special. set_resp_cookie must be used. + {500, _, _} = do_maybe_h3_error3(do_get("/resp/reply4/set_cookie", Config)), ok. -%% @todo Crash when stream_reply is called twice. - stream_reply2(Config) -> doc("Response with default headers and streamed body."), Body = <<0:8000000>>, @@ -912,9 +1017,37 @@ stream_reply2(Config) -> {201, _, Body} = do_get("/resp/stream_reply2/201", Config), {404, _, Body} = do_get("/resp/stream_reply2/404", Config), {200, _, Body} = do_get("/resp/stream_reply2/binary", Config), - {500, _, _} = do_get("/resp/stream_reply2/error", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/resp/stream_reply2/error", Config)), ok. +stream_reply2_twice(Config) -> + doc("Attempting to stream a response twice results in a crash."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/resp/stream_reply2/twice", + [{<<"accept-encoding">>, <<"gzip">>}]), + {response, nofin, 200, _} = gun:await(ConnPid, Ref, infinity), + Protocol = config(protocol, Config), + Flavor = config(flavor, Config), + case {Protocol, Flavor, gun:await_body(ConnPid, Ref, infinity)} of + %% In HTTP/1.1 we cannot propagate an error at that point. + %% The response will simply not have a body. + {http, vanilla, {ok, <<>>}} -> + ok; + %% When compression was used we do get gzip headers. But + %% we do not have any data in the zlib stream. + {http, compress, {ok, Data}} -> + Z = zlib:open(), + zlib:inflateInit(Z, 31), + 0 = iolist_size(zlib:inflate(Z, Data)), + ok; + %% In HTTP/2 and HTTP/3 the stream gets reset with an appropriate error. + {http2, _, {error, {stream_error, {stream_error, internal_error, _}}}} -> + ok; + {http3, _, {error, {stream_error, {stream_error, h3_internal_error, _}}}} -> + ok + end, + gun:close(ConnPid). 
+ stream_reply3(Config) -> doc("Response with additional headers and streamed body."), Body = <<0:8000000>>, @@ -924,7 +1057,9 @@ stream_reply3(Config) -> true = lists:keymember(<<"content-type">>, 1, Headers2), {404, Headers3, Body} = do_get("/resp/stream_reply3/404", Config), true = lists:keymember(<<"content-type">>, 1, Headers3), - {500, _, _} = do_get("/resp/stream_reply3/error", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/resp/stream_reply3/error", Config)), + %% The set-cookie header is special. set_resp_cookie must be used. + {500, _, _} = do_maybe_h3_error3(do_get("/resp/stream_reply3/set_cookie", Config)), ok. stream_body_fin0(Config) -> @@ -1008,8 +1143,11 @@ stream_body_content_length_nofin_error(Config) -> end end; http2 -> - %% @todo HTTP2 should have the same content-length checks - ok + %% @todo HTTP/2 should have the same content-length checks. + {skip, "Implement the test for HTTP/2."}; + http3 -> + %% @todo HTTP/3 should have the same content-length checks. + {skip, "Implement the test for HTTP/3."} end. stream_body_concurrent(Config) -> @@ -1104,6 +1242,35 @@ stream_trailers_no_te(Config) -> <<"Hello world!">> = do_decode(RespHeaders, RespBody), gun:close(ConnPid). +stream_trailers_set_cookie(Config) -> + doc("Trying to send set-cookie in trailers should result in a crash."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/resp/stream_trailers/set_cookie", [ + {<<"accept-encoding">>, <<"gzip">>}, + {<<"te">>, <<"trailers">>} + ]), + Protocol = config(protocol, Config), + case gun:await(ConnPid, Ref, infinity) of + {response, nofin, 200, _} when Protocol =:= http -> + %% Trailers are not sent because of the stream error. + {ok, _Body} = gun:await_body(ConnPid, Ref, infinity), + {error, timeout} = gun:await_body(ConnPid, Ref, 1000), + ok; + {response, nofin, 200, _} when Protocol =:= http2 -> + {error, {stream_error, {stream_error, internal_error, _}}} + = gun:await_body(ConnPid, Ref, infinity), + ok; + {response, nofin, 200, _} when Protocol =:= http3 -> + {error, {stream_error, {stream_error, h3_internal_error, _}}} + = gun:await_body(ConnPid, Ref, infinity), + ok; + %% The RST_STREAM arrived before the start of the response. + %% See maybe_h3_error comment for details. + {error, {stream_error, {stream_error, h3_internal_error, _}}} when Protocol =:= http3 -> + ok + end, + gun:close(ConnPid). + do_trailers(Path, Config) -> ConnPid = gun_open(Config), Ref = gun:get(ConnPid, Path, [ @@ -1127,26 +1294,45 @@ do_trailers(Path, Config) -> push(Config) -> case config(protocol, Config) of http -> do_push_http("/resp/push", Config); - http2 -> do_push_http2(Config) + http2 -> do_push_http2(Config); + http3 -> {skip, "Implement server push for HTTP/3."} end. +push_after_reply(Config) -> + doc("Trying to push a response after the final response results in a crash."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/resp/push/after_reply", []), + %% With HTTP/1.1 and HTTP/2 we will not get an error. + %% With HTTP/3 however the stream will occasionally + %% be reset before Gun receives the response. + case gun:await(ConnPid, Ref, infinity) of + {response, fin, 200, _} -> + ok; + {error, {stream_error, {stream_error, h3_internal_error, _}}} -> + ok + end, + gun:close(ConnPid). + push_method(Config) -> case config(protocol, Config) of http -> do_push_http("/resp/push/method", Config); - http2 -> do_push_http2_method(Config) + http2 -> do_push_http2_method(Config); + http3 -> {skip, "Implement server push for HTTP/3."} end. 
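[Editor's note] The push tests above exercise cowboy_req:push/3,4 on the handler side; HTTP/1.1 simply ignores the call (hence the plain 200 expected by do_push_http) and HTTP/3 server push is still marked as a skip. A small handler fragment as a sketch, with an assumed path and headers:

init(Req0, State) ->
    %% Push a stylesheet alongside the main response. On HTTP/1.1
    %% this is a no-op; on HTTP/2 it results in a PUSH_PROMISE.
    cowboy_req:push("/static/style.css",
        #{<<"accept">> => <<"text/css">>}, Req0),
    Req = cowboy_req:reply(200,
        #{<<"content-type">> => <<"text/html">>},
        <<"<html><body>Hello!</body></html>">>, Req0),
    {ok, Req, State}.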
push_origin(Config) -> case config(protocol, Config) of http -> do_push_http("/resp/push/origin", Config); - http2 -> do_push_http2_origin(Config) + http2 -> do_push_http2_origin(Config); + http3 -> {skip, "Implement server push for HTTP/3."} end. push_qs(Config) -> case config(protocol, Config) of http -> do_push_http("/resp/push/qs", Config); - http2 -> do_push_http2_qs(Config) + http2 -> do_push_http2_qs(Config); + http3 -> {skip, "Implement server push for HTTP/3."} end. do_push_http(Path, Config) -> @@ -1154,7 +1340,7 @@ do_push_http(Path, Config) -> ConnPid = gun_open(Config), Ref = gun:get(ConnPid, Path, []), {response, fin, 200, _} = gun:await(ConnPid, Ref, infinity), - ok. + gun:close(ConnPid). do_push_http2(Config) -> doc("Pushed responses."), diff --git a/test/rest_handler_SUITE.erl b/test/rest_handler_SUITE.erl index 43695c3..a3d9533 100644 --- a/test/rest_handler_SUITE.erl +++ b/test/rest_handler_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -32,7 +32,7 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). %% Dispatch configuration. @@ -52,6 +52,7 @@ init_dispatch(_) -> {"/content_types_accepted", content_types_accepted_h, []}, {"/content_types_provided", content_types_provided_h, []}, {"/delete_resource", delete_resource_h, []}, + {"/create_resource", create_resource_h, []}, {"/expires", expires_h, []}, {"/generate_etag", generate_etag_h, []}, {"/if_range", if_range_h, []}, @@ -84,7 +85,7 @@ accept_callback_missing(Config) -> {<<"accept-encoding">>, <<"gzip">>}, {<<"content-type">>, <<"text/plain">>} ], <<"Missing!">>), - {response, fin, 500, _} = gun:await(ConnPid, Ref), + {response, fin, 500, _} = do_maybe_h3_error(gun:await(ConnPid, Ref)), ok. accept_callback_patch_false(Config) -> @@ -126,7 +127,7 @@ do_accept_callback_true(Config, Fun) -> ok. charset_in_content_types_provided(Config) -> - doc("When a charset is matched explictly in content_types_provided, " + doc("When a charset is matched explicitly in content_types_provided, " "that charset is used and the charsets_provided callback is ignored."), ConnPid = gun_open(Config), Ref = gun:get(ConnPid, "/charset_in_content_types_provided", [ @@ -403,6 +404,18 @@ content_types_accepted_wildcard_param_content_type_with_param(Config) -> {response, fin, 204, _} = gun:await(ConnPid, Ref), ok. +content_types_provided_invalid_type(Config) -> + doc("When an invalid type is returned from the " + "content_types_provided callback, the " + "resource is incorrect and a 500 response is expected."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/content_types_provided?invalid-type", [ + {<<"accept">>, <<"*/*">>}, + {<<"accept-encoding">>, <<"gzip">>} + ]), + {response, _, 500, _} = do_maybe_h3_error(gun:await(ConnPid, Ref)), + ok. 
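[Editor's note] content_types_provided_invalid_type above expects a 500 when the callback returns something that is not a usable media type. For contrast, a well-formed return is sketched below (an illustration, not the suite's content_types_provided_h):

%% Each provided media type is paired with the callback that renders
%% that representation; '*' accepts any media type parameters.
content_types_provided(Req, State) ->
    {[
        {{<<"text">>, <<"plain">>, '*'}, to_text}
    ], Req, State}.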
+ content_types_provided_wildcard_param_no_accept_param(Config) -> doc("When a wildcard is returned for parameters from the " "content_types_provided callback, an accept header " @@ -471,7 +484,30 @@ delete_resource_missing(Config) -> Ref = gun:delete(ConnPid, "/delete_resource?missing", [ {<<"accept-encoding">>, <<"gzip">>} ]), - {response, _, 500, _} = gun:await(ConnPid, Ref), + {response, _, 500, _} = do_maybe_h3_error(gun:await(ConnPid, Ref)), + ok. + +create_resource_created(Config) -> + doc("POST to an existing resource to create a new resource. " + "When the accept callback returns {created, NewURI}, " + "the expected reply is 201 Created."), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/create_resource?created", [ + {<<"content-type">>, <<"application/text">>} + ], <<"hello">>, #{}), + {response, _, 201, _} = gun:await(ConnPid, Ref), + ok. + +create_resource_see_other(Config) -> + doc("POST to an existing resource to create a new resource. " + "When the accept callback returns {see_other, NewURI}, " + "the expected reply is 303 See Other with a location header set."), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/create_resource?see_other", [ + {<<"content-type">>, <<"application/text">>} + ], <<"hello">>, #{}), + {response, _, 303, RespHeaders} = gun:await(ConnPid, Ref), + {_, _} = lists:keyfind(<<"location">>, 1, RespHeaders), ok. error_on_malformed_accept(Config) -> @@ -547,6 +583,17 @@ generate_etag_missing(Config) -> false = lists:keyfind(<<"etag">>, 1, Headers), ok. +generate_etag_undefined(Config) -> + doc("The etag header must not be sent when " + "the generate_etag callback returns undefined."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/generate_etag?undefined", [ + {<<"accept-encoding">>, <<"gzip">>} + ]), + {response, _, 200, Headers} = gun:await(ConnPid, Ref), + false = lists:keyfind(<<"etag">>, 1, Headers), + ok. + generate_etag_binary_strong(Config) -> doc("The etag header must be sent when the generate_etag " "callback returns a strong binary. (RFC7232 2.3)"), @@ -615,10 +662,16 @@ do_generate_etag(Config, Qs, ReqHeaders, Status, Etag) -> {<<"accept-encoding">>, <<"gzip">>} |ReqHeaders ]), - {response, _, Status, RespHeaders} = gun:await(ConnPid, Ref), + {response, _, Status, RespHeaders} = do_maybe_h3_error(gun:await(ConnPid, Ref)), Etag = lists:keyfind(<<"etag">>, 1, RespHeaders), ok. +%% See do_maybe_h3_error2 comment. +do_maybe_h3_error({error, {stream_error, {stream_error, h3_internal_error, _}}}) -> + {response, fin, 500, []}; +do_maybe_h3_error(Result) -> + Result. + if_range_etag_equal(Config) -> doc("When the if-range header matches, a 206 partial content " "response is expected for an otherwise valid range request. (RFC7233 3.2)"), @@ -764,6 +817,7 @@ provide_callback(Config) -> ]), {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), {_, <<"text/plain">>} = lists:keyfind(<<"content-type">>, 1, Headers), + {_, <<"HEAD, GET, OPTIONS">>} = lists:keyfind(<<"allow">>, 1, Headers), {ok, <<"This is REST!">>} = gun:await_body(ConnPid, Ref), ok. @@ -771,7 +825,7 @@ provide_callback_missing(Config) -> doc("A 500 response must be sent when the ProvideCallback can't be called."), ConnPid = gun_open(Config), Ref = gun:get(ConnPid, "/provide_callback_missing", [{<<"accept-encoding">>, <<"gzip">>}]), - {response, fin, 500, _} = gun:await(ConnPid, Ref), + {response, fin, 500, _} = do_maybe_h3_error(gun:await(ConnPid, Ref)), ok. 
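[Editor's note] The create_resource tests added above rely on the AcceptCallback being allowed to return {created, URI} or {see_other, URI}. A sketch of what such a handler might look like, under assumed names and URIs (this is not the actual create_resource_h from the suite):

-module(create_resource_example_h).
-export([init/2, allowed_methods/2, content_types_accepted/2, from_text/2]).

init(Req, State) ->
    {cowboy_rest, Req, State}.

allowed_methods(Req, State) ->
    {[<<"POST">>], Req, State}.

content_types_accepted(Req, State) ->
    {[{{<<"application">>, <<"text">>, '*'}, from_text}], Req, State}.

%% {created, URI} replies 201 Created; {see_other, URI} replies
%% 303 See Other with a location header, as asserted above.
from_text(Req=#{qs := <<"created">>}, State) ->
    {{created, <<"/created/123">>}, Req, State};
from_text(Req=#{qs := <<"see_other">>}, State) ->
    {{see_other, <<"/see-other/123">>}, Req, State}.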
provide_range_callback(Config) -> @@ -927,7 +981,7 @@ provide_range_callback_missing(Config) -> {<<"accept-encoding">>, <<"gzip">>}, {<<"range">>, <<"bytes=0-">>} ]), - {response, fin, 500, _} = gun:await(ConnPid, Ref), + {response, fin, 500, _} = do_maybe_h3_error(gun:await(ConnPid, Ref)), ok. range_ignore_unknown_unit(Config) -> diff --git a/test/rfc6585_SUITE.erl b/test/rfc6585_SUITE.erl index 1f65f78..4a627e5 100644 --- a/test/rfc6585_SUITE.erl +++ b/test/rfc6585_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -30,7 +30,7 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). init_dispatch(_) -> cowboy_router:compile([{"[...]", [ diff --git a/test/rfc7230_SUITE.erl b/test/rfc7230_SUITE.erl index 9846a0f..d0da0df 100644 --- a/test/rfc7230_SUITE.erl +++ b/test/rfc7230_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2015-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -22,6 +22,7 @@ -import(cowboy_test, [raw_open/1]). -import(cowboy_test, [raw_send/2]). -import(cowboy_test, [raw_recv_head/1]). +-import(cowboy_test, [raw_recv_rest/3]). -import(cowboy_test, [raw_recv/3]). suite() -> @@ -63,13 +64,7 @@ do_raw(Config, Data) -> {Headers, Rest2} = cow_http:parse_headers(Rest), case lists:keyfind(<<"content-length">>, 1, Headers) of {_, LengthBin} when LengthBin =/= <<"0">> -> - Length = binary_to_integer(LengthBin), - Body = if - byte_size(Rest2) =:= Length -> Rest2; - true -> - {ok, Body0} = raw_recv(Client, Length - byte_size(Rest2), 5000), - << Rest2/bits, Body0/bits >> - end, + Body = raw_recv_rest(Client, binary_to_integer(LengthBin), Rest2), #{client => Client, version => Version, code => Code, reason => Reason, headers => Headers, body => Body}; _ -> #{client => Client, version => Version, code => Code, reason => Reason, headers => Headers, body => <<>>} @@ -1149,18 +1144,19 @@ reject_invalid_content_length(Config) -> %with a message body too large must be rejected with a 413 status %code and the closing of the connection. (RFC7230 3.3.2) -ignore_content_length_when_transfer_encoding(Config) -> +reject_when_both_content_length_and_transfer_encoding(Config) -> doc("When a message includes both transfer-encoding and content-length " - "headers, the content-length header must be removed before processing " - "the request. (RFC7230 3.3.3)"), - #{code := 200, body := <<"Hello world!">>} = do_raw(Config, [ + "headers, the message may be an attempt at request smuggling. It " + "must be rejected with a 400 status code and the closing of the " + "connection. (RFC7230 3.3.3)"), + #{code := 400, client := Client} = do_raw(Config, [ "POST /echo/read_body HTTP/1.1\r\n" "Host: localhost\r\n" "Transfer-encoding: chunked\r\n" "Content-length: 12\r\n" "\r\n" "6\r\nHello \r\n5\r\nworld\r\n1\r\n!\r\n0\r\n\r\n"]), - ok. + {error, closed} = raw_recv(Client, 0, 1000). 
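[Editor's note] The raw_recv_rest/3 import added above replaces the inlined body-reading logic that this hunk removes from do_raw. Judging from that removed code, the helper in cowboy_test presumably looks roughly like the reconstruction below (a sketch, not the verbatim helper):

%% Return the rest of the body, reading from the socket only when the
%% bytes received along with the response head are not enough.
raw_recv_rest(_Client, Length, Rest) when byte_size(Rest) =:= Length ->
    Rest;
raw_recv_rest(Client, Length, Rest) ->
    {ok, Body} = raw_recv(Client, Length - byte_size(Rest), 5000),
    <<Rest/binary, Body/binary>>.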
%socket_error_while_reading_body(Config) -> %If a socket error occurs while reading the body the server @@ -1512,6 +1508,28 @@ http10_no_connection_header_close(Config) -> {_, <<"close">>} = lists:keyfind(<<"connection">>, 1, RespHeaders), {error, closed} = raw_recv(Client, 0, 1000). +connection_invalid(Config) -> + doc("HTTP/1.1 requests with an invalid Connection header " + "must be rejected with a 400 status code and the closing " + "of the connection. (RFC7230 6.1)"), + #{code := 400, client := Client} = do_raw(Config, [ + "GET / HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: jndi{ldap127\r\n" + "\r\n"]), + {error, closed} = raw_recv(Client, 0, 1000). + +http10_connection_invalid(Config) -> + doc("HTTP/1.0 requests with an invalid Connection header " + "must be rejected with a 400 status code and the closing " + "of the connection. (RFC7230 6.1)"), + #{code := 400, client := Client} = do_raw(Config, [ + "GET / HTTP/1.0\r\n" + "Host: localhost\r\n" + "Connection: jndi{ldap127\r\n" + "\r\n"]), + {error, closed} = raw_recv(Client, 0, 1000). + limit_requests_keepalive(Config) -> doc("The maximum number of requests sent using a persistent connection " "must be subject to configuration. The connection must be closed " diff --git a/test/rfc7231_SUITE.erl b/test/rfc7231_SUITE.erl index 6c74391..183fa0f 100644 --- a/test/rfc7231_SUITE.erl +++ b/test/rfc7231_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -35,7 +35,7 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). init_dispatch(_) -> cowboy_router:compile([{"[...]", [ @@ -230,13 +230,15 @@ expect(Config) -> {<<"expect">>, <<"100-continue">>} ]), {inform, 100, _} = gun:await(ConnPid, Ref), - ok. + gun:close(ConnPid). http10_expect(Config) -> case config(protocol, Config) of http -> do_http10_expect(Config); http2 -> + expect(Config); + http3 -> expect(Config) end. @@ -303,6 +305,9 @@ expect_discard_body_close(Config) -> do_expect_discard_body_close(Config); http2 -> doc("There's no reason to close the connection when using HTTP/2, " + "even if a stream body is too large. We just cancel the stream."); + http3 -> + doc("There's no reason to close the connection when using HTTP/3, " "even if a stream body is too large. We just cancel the stream.") end. @@ -424,8 +429,10 @@ http10_status_code_100(Config) -> http -> doc("The 100 Continue status code must not " "be sent to HTTP/1.0 endpoints. (RFC7231 6.2)"), - do_http10_status_code_1xx(100, Config); + do_unsupported_status_code_1xx(100, Config); http2 -> + status_code_100(Config); + http3 -> status_code_100(Config) end. @@ -434,12 +441,16 @@ http10_status_code_101(Config) -> http -> doc("The 101 Switching Protocols status code must not " "be sent to HTTP/1.0 endpoints. (RFC7231 6.2)"), - do_http10_status_code_1xx(101, Config); + do_unsupported_status_code_1xx(101, Config); http2 -> + status_code_101(Config); + http3 -> + %% While 101 is not supported by HTTP/3, there is no + %% wording in RFC9114 that forbids sending it. status_code_101(Config) end. 
-do_http10_status_code_1xx(StatusCode, Config) -> +do_unsupported_status_code_1xx(StatusCode, Config) -> ConnPid = gun_open(Config, #{http_opts => #{version => 'HTTP/1.0'}}), Ref = gun:get(ConnPid, "/resp/inform2/" ++ integer_to_list(StatusCode), [ {<<"accept-encoding">>, <<"gzip">>} @@ -653,7 +664,9 @@ status_code_408_connection_close(Config) -> http -> do_http11_status_code_408_connection_close(Config); http2 -> - doc("HTTP/2 connections are not closed on 408 responses.") + doc("HTTP/2 connections are not closed on 408 responses."); + http3 -> + doc("HTTP/3 connections are not closed on 408 responses.") end. do_http11_status_code_408_connection_close(Config) -> @@ -744,7 +757,9 @@ status_code_426_upgrade_header(Config) -> http -> do_status_code_426_upgrade_header(Config); http2 -> - doc("HTTP/2 does not support the HTTP/1.1 Upgrade mechanism.") + doc("HTTP/2 does not support the HTTP/1.1 Upgrade mechanism."); + http3 -> + doc("HTTP/3 does not support the HTTP/1.1 Upgrade mechanism.") end. do_status_code_426_upgrade_header(Config) -> diff --git a/test/rfc7538_SUITE.erl b/test/rfc7538_SUITE.erl index 5eb9705..ea51209 100644 --- a/test/rfc7538_SUITE.erl +++ b/test/rfc7538_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -30,7 +30,7 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). init_dispatch(_) -> cowboy_router:compile([{"[...]", [ diff --git a/test/rfc7540_SUITE.erl b/test/rfc7540_SUITE.erl index 6d8aa91..76aa95f 100644 --- a/test/rfc7540_SUITE.erl +++ b/test/rfc7540_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2016-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -12,6 +12,12 @@ %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +%% Note that Cowboy does not implement the PRIORITY mechanism. +%% Everyone has been moving away from it and it is widely seen +%% as a failure. Setting priorities has been counter productive +%% with regards to performance. Clients have been moving away +%% from the mechanism. + -module(rfc7540_SUITE). -compile(export_all). -compile(nowarn_export_all). @@ -28,9 +34,10 @@ all() -> [{group, clear}, {group, tls}]. groups() -> - Modules = ct_helper:all(?MODULE), - Clear = [M || M <- Modules, lists:sublist(atom_to_list(M), 4) =/= "alpn"] -- [prior_knowledge_reject_tls], - TLS = [M || M <- Modules, lists:sublist(atom_to_list(M), 4) =:= "alpn"] ++ [prior_knowledge_reject_tls], + Tests = ct_helper:all(?MODULE), + RejectTLS = [http_upgrade_reject_tls, prior_knowledge_reject_tls], + Clear = [T || T <- Tests, lists:sublist(atom_to_list(T), 4) =/= "alpn"] -- RejectTLS, + TLS = [T || T <- Tests, lists:sublist(atom_to_list(T), 4) =:= "alpn"] ++ RejectTLS, [{clear, [parallel], Clear}, {tls, [parallel], TLS}]. init_per_group(Name = clear, Config) -> @@ -62,6 +69,24 @@ init_routes(_) -> [ %% Starting HTTP/2 for "http" URIs. +http_upgrade_reject_tls(Config) -> + doc("Implementations that support HTTP/2 over TLS must use ALPN. 
(RFC7540 3.4)"), + TlsOpts = ct_helper:get_certs_from_ets(), + {ok, Socket} = ssl:connect("localhost", config(port, Config), + [binary, {active, false}|TlsOpts]), + %% Send a valid preface. + ok = ssl:send(Socket, [ + "GET / HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade, HTTP2-Settings\r\n" + "Upgrade: h2c\r\n" + "HTTP2-Settings: ", base64:encode(cow_http2:settings_payload(#{})), "\r\n", + "\r\n"]), + %% We expect the server to send an HTTP 400 error + %% when trying to use HTTP/2 without going through ALPN negotiation. + {ok, <<"HTTP/1.1 400">>} = ssl:recv(Socket, 12, 1000), + ok. + http_upgrade_ignore_h2(Config) -> doc("An h2 token in an Upgrade field must be ignored. (RFC7540 3.2)"), {ok, Socket} = gen_tcp:connect("localhost", config(port, Config), [binary, {active, false}]), @@ -483,14 +508,6 @@ http_upgrade_client_preface_settings_ack_timeout(Config) -> %% important, an OPTIONS request can be used to perform the upgrade to %% HTTP/2, at the cost of an additional round trip. -%% @todo If we ever handle priority, we need to check that the initial -%% HTTP/1.1 request has default priority. The relevant RFC quote is: -%% -%% 3.2 -%% The HTTP/1.1 request that is sent prior to upgrade is assigned a -%% stream identifier of 1 (see Section 5.1.1) with default priority -%% values (Section 5.3.5). - http_upgrade_response(Config) -> doc("A response must be sent to the initial HTTP/1.1 request " "after switching to HTTP/2. The response must use " @@ -589,16 +606,20 @@ http_upgrade_response_half_closed(Config) -> alpn_ignore_h2c(Config) -> doc("An h2c ALPN protocol identifier must be ignored. (RFC7540 3.3)"), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2c">>, <<"http/1.1">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2c">>, <<"http/1.1">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"http/1.1">>} = ssl:negotiated_protocol(Socket), ok. alpn_server_preface(Config) -> doc("The first frame must be a SETTINGS frame " "for the server connection preface. (RFC7540 3.3, RFC7540 3.5, RFC7540 6.5)"), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"h2">>} = ssl:negotiated_protocol(Socket), %% Receive the server preface. {ok, << _:24, 4:8, 0:40 >>} = ssl:recv(Socket, 9, 1000), @@ -607,8 +628,10 @@ alpn_server_preface(Config) -> alpn_client_preface_timeout(Config) -> doc("Clients negotiating HTTP/2 and not sending a preface in " "a timely manner must be disconnected."), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"h2">>} = ssl:negotiated_protocol(Socket), %% Receive the server preface. {ok, << Len:24 >>} = ssl:recv(Socket, 3, 1000), @@ -620,8 +643,10 @@ alpn_client_preface_timeout(Config) -> alpn_reject_missing_client_preface(Config) -> doc("Servers must treat an invalid connection preface as a " "connection error of type PROTOCOL_ERROR. 
(RFC7540 3.3, RFC7540 3.5)"), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"h2">>} = ssl:negotiated_protocol(Socket), %% Send a SETTINGS frame directly instead of the proper preface. ok = ssl:send(Socket, cow_http2:settings(#{})), @@ -635,8 +660,10 @@ alpn_reject_missing_client_preface(Config) -> alpn_reject_invalid_client_preface(Config) -> doc("Servers must treat an invalid connection preface as a " "connection error of type PROTOCOL_ERROR. (RFC7540 3.3, RFC7540 3.5)"), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"h2">>} = ssl:negotiated_protocol(Socket), %% Send a slightly incorrect preface. ok = ssl:send(Socket, "PRI * HTTP/2.0\r\n\r\nSM: Value\r\n\r\n"), @@ -650,8 +677,10 @@ alpn_reject_invalid_client_preface(Config) -> alpn_reject_missing_client_preface_settings(Config) -> doc("Servers must treat an invalid connection preface as a " "connection error of type PROTOCOL_ERROR. (RFC7540 3.3, RFC7540 3.5)"), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"h2">>} = ssl:negotiated_protocol(Socket), %% Send a valid preface sequence except followed by a PING instead of a SETTINGS frame. ok = ssl:send(Socket, ["PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", cow_http2:ping(0)]), @@ -665,8 +694,10 @@ alpn_reject_missing_client_preface_settings(Config) -> alpn_reject_invalid_client_preface_settings(Config) -> doc("Servers must treat an invalid connection preface as a " "connection error of type PROTOCOL_ERROR. (RFC7540 3.3, RFC7540 3.5)"), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"h2">>} = ssl:negotiated_protocol(Socket), %% Send a valid preface sequence except followed by a badly formed SETTINGS frame. ok = ssl:send(Socket, ["PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", << 0:24, 4:8, 0:9, 1:31 >>]), @@ -679,8 +710,10 @@ alpn_reject_invalid_client_preface_settings(Config) -> alpn_accept_client_preface_empty_settings(Config) -> doc("The SETTINGS frame in the client preface may be empty. (RFC7540 3.3, RFC7540 3.5)"), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"h2">>} = ssl:negotiated_protocol(Socket), %% Send a valid preface sequence except followed by an empty SETTINGS frame. ok = ssl:send(Socket, ["PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", cow_http2:settings(#{})]), @@ -694,8 +727,10 @@ alpn_accept_client_preface_empty_settings(Config) -> alpn_client_preface_settings_ack_timeout(Config) -> doc("Failure to acknowledge the server's SETTINGS frame " "results in a SETTINGS_TIMEOUT connection error. 
(RFC7540 3.5, RFC7540 6.5.3)"), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"h2">>} = ssl:negotiated_protocol(Socket), %% Send a valid preface. ok = ssl:send(Socket, ["PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", cow_http2:settings(#{})]), @@ -710,8 +745,10 @@ alpn_client_preface_settings_ack_timeout(Config) -> alpn(Config) -> doc("Successful ALPN negotiation. (RFC7540 3.3)"), + TlsOpts = ct_helper:get_certs_from_ets(), {ok, Socket} = ssl:connect("localhost", config(port, Config), - [{alpn_advertised_protocols, [<<"h2">>]}, binary, {active, false}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + binary, {active, false}|TlsOpts]), {ok, <<"h2">>} = ssl:negotiated_protocol(Socket), %% Send a valid preface. %% @todo Use non-empty SETTINGS here. Just because. @@ -735,7 +772,9 @@ alpn(Config) -> prior_knowledge_reject_tls(Config) -> doc("Implementations that support HTTP/2 over TLS must use ALPN. (RFC7540 3.4)"), - {ok, Socket} = ssl:connect("localhost", config(port, Config), [binary, {active, false}]), + TlsOpts = ct_helper:get_certs_from_ets(), + {ok, Socket} = ssl:connect("localhost", config(port, Config), + [binary, {active, false}|TlsOpts]), %% Send a valid preface. ok = ssl:send(Socket, ["PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", cow_http2:settings(#{})]), %% We expect the server to send an HTTP 400 error @@ -1354,7 +1393,8 @@ max_frame_size_allow_exactly_custom(Config0) -> {ok, << Len2:24, 1:8, _:40 >>} = gen_tcp:recv(Socket, 9, 6000), {ok, _} = gen_tcp:recv(Socket, Len2, 6000), %% No errors follow due to our sending of a 25000 bytes frame. - {error, timeout} = gen_tcp:recv(Socket, 0, 1000) + {error, timeout} = gen_tcp:recv(Socket, 0, 1000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -1384,7 +1424,8 @@ max_frame_size_reject_larger_than_custom(Config0) -> cow_http2:data(1, fin, <<0:30001/unit:8>>) ]), %% Receive a FRAME_SIZE_ERROR connection error. - {ok, << _:24, 7:8, _:72, 6:32 >>} = gen_tcp:recv(Socket, 17, 6000) + {ok, << _:24, 7:8, _:72, 6:32 >>} = gen_tcp:recv(Socket, 17, 6000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -2599,9 +2640,10 @@ settings_header_table_size_server(Config0) -> {ok, << Len1:24, 1:8, _:40 >>} = gen_tcp:recv(Socket, 9, 6000), {ok, RespHeadersBlock1} = gen_tcp:recv(Socket, Len1, 6000), {RespHeaders, _} = cow_hpack:decode(RespHeadersBlock1, DecodeState), - {_, <<"200">>} = lists:keyfind(<<":status">>, 1, RespHeaders) + {_, <<"200">>} = lists:keyfind(<<":status">>, 1, RespHeaders), %% The decoding succeeded on the server, confirming that %% the table size was updated to HeaderTableSize. + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -2630,7 +2672,8 @@ settings_max_concurrent_streams(Config0) -> cow_http2:headers(3, fin, ReqHeadersBlock2) ]), %% Receive a REFUSED_STREAM stream error. - {ok, << _:24, 3:8, _:8, 3:32, 7:32 >>} = gen_tcp:recv(Socket, 13, 6000) + {ok, << _:24, 3:8, _:8, 3:32, 7:32 >>} = gen_tcp:recv(Socket, 13, 6000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -2654,7 +2697,8 @@ settings_max_concurrent_streams_0(Config0) -> ]), ok = gen_tcp:send(Socket, cow_http2:headers(1, fin, HeadersBlock)), %% Receive a REFUSED_STREAM stream error. 
- {ok, << _:24, 3:8, _:8, 1:32, 7:32 >>} = gen_tcp:recv(Socket, 13, 6000) + {ok, << _:24, 3:8, _:8, 1:32, 7:32 >>} = gen_tcp:recv(Socket, 13, 6000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -2722,7 +2766,8 @@ settings_initial_window_size(Config0) -> {ok, << Len2:24, 1:8, _:40 >>} = gen_tcp:recv(Socket, 9, 6000), {ok, _} = gen_tcp:recv(Socket, Len2, 6000), %% No errors follow due to our sending of more than 65535 bytes of data. - {error, timeout} = gen_tcp:recv(Socket, 0, 1000) + {error, timeout} = gen_tcp:recv(Socket, 0, 1000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -2765,7 +2810,8 @@ settings_initial_window_size_after_ack(Config0) -> cow_http2:data(1, fin, <<0:32/unit:8>>) ]), %% Receive a FLOW_CONTROL_ERROR stream error. - {ok, << _:24, 3:8, _:8, 1:32, 3:32 >>} = gen_tcp:recv(Socket, 13, 6000) + {ok, << _:24, 3:8, _:8, 1:32, 3:32 >>} = gen_tcp:recv(Socket, 13, 6000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -2813,7 +2859,8 @@ settings_initial_window_size_before_ack(Config0) -> {ok, << Len2:24, 1:8, _:40 >>} = gen_tcp:recv(Socket, 9, 6000), {ok, _} = gen_tcp:recv(Socket, Len2, 6000), %% No errors follow due to our sending of more than 0 bytes of data. - {error, timeout} = gen_tcp:recv(Socket, 0, 1000) + {error, timeout} = gen_tcp:recv(Socket, 0, 1000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -2846,7 +2893,8 @@ settings_max_frame_size(Config0) -> {ok, << Len2:24, 1:8, _:40 >>} = gen_tcp:recv(Socket, 9, 6000), {ok, _} = gen_tcp:recv(Socket, Len2, 6000), %% No errors follow due to our sending of a 25000 bytes frame. - {error, timeout} = gen_tcp:recv(Socket, 0, 1000) + {error, timeout} = gen_tcp:recv(Socket, 0, 1000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -3095,7 +3143,8 @@ data_reject_overflow(Config0) -> cow_http2:data(1, fin, <<0:15000/unit:8>>) ]), %% Receive a FLOW_CONTROL_ERROR connection error. - {ok, << _:24, 7:8, _:72, 3:32 >>} = gen_tcp:recv(Socket, 17, 6000) + {ok, << _:24, 7:8, _:72, 3:32 >>} = gen_tcp:recv(Socket, 17, 6000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -3143,7 +3192,8 @@ data_reject_overflow_stream(Config0) -> cow_http2:data(1, fin, <<0:15000/unit:8>>) ]), %% Receive a FLOW_CONTROL_ERROR stream error. - {ok, << _:24, 3:8, _:8, 1:32, 3:32 >>} = gen_tcp:recv(Socket, 13, 6000) + {ok, << _:24, 3:8, _:8, 1:32, 3:32 >>} = gen_tcp:recv(Socket, 13, 6000), + gen_tcp:close(Socket) after cowboy:stop_listener(?FUNCTION_NAME) end. @@ -3862,6 +3912,7 @@ accept_host_header_on_missing_pseudo_header_authority(Config) -> %% When both :authority and host headers are received, the current behavior %% is to favor :authority and ignore the host header. The specification does %% not describe the correct behavior to follow in that case. +%% @todo The HTTP/3 spec says both values must be identical and non-empty. 
reject_many_pseudo_header_authority(Config) -> doc("A request containing more than one authority component must be rejected " diff --git a/test/rfc8297_SUITE.erl b/test/rfc8297_SUITE.erl index 9ae6180..42ae92e 100644 --- a/test/rfc8297_SUITE.erl +++ b/test/rfc8297_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -30,7 +30,7 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). init_dispatch(_) -> cowboy_router:compile([{"[...]", [ diff --git a/test/rfc8441_SUITE.erl b/test/rfc8441_SUITE.erl index 245658f..b788f9f 100644 --- a/test/rfc8441_SUITE.erl +++ b/test/rfc8441_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -126,6 +126,7 @@ reject_handshake_disabled_by_default(Config0) -> % The Extended CONNECT Method. +%% @todo Refer to RFC9110 7.8 about the case insensitive comparison. accept_uppercase_pseudo_header_protocol(Config) -> doc("The :protocol pseudo header is case insensitive. (draft-01 4)"), %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1. @@ -172,6 +173,7 @@ reject_many_pseudo_header_protocol(Config) -> ok. reject_unknown_pseudo_header_protocol(Config) -> + %% @todo This probably shouldn't send 400 but 501 instead based on RFC 9220. doc("An extended CONNECT request with an unknown protocol must be rejected " "with a 400 error. (draft-01 4)"), %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1. @@ -192,10 +194,11 @@ reject_unknown_pseudo_header_protocol(Config) -> {ok, << Len1:24, 1:8, _:8, 1:32 >>} = gen_tcp:recv(Socket, 9, 1000), {ok, RespHeadersBlock} = gen_tcp:recv(Socket, Len1, 1000), {RespHeaders, _} = cow_hpack:decode(RespHeadersBlock), - {_, <<"400">>} = lists:keyfind(<<":status">>, 1, RespHeaders), + {_, <<"501">>} = lists:keyfind(<<":status">>, 1, RespHeaders), ok. reject_invalid_pseudo_header_protocol(Config) -> + %% @todo This probably shouldn't send 400 but 501 instead based on RFC 9220. doc("An extended CONNECT request with an invalid protocol must be rejected " "with a 400 error. (draft-01 4)"), %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1. @@ -216,7 +219,7 @@ reject_invalid_pseudo_header_protocol(Config) -> {ok, << Len1:24, 1:8, _:8, 1:32 >>} = gen_tcp:recv(Socket, 9, 1000), {ok, RespHeadersBlock} = gen_tcp:recv(Socket, Len1, 1000), {RespHeaders, _} = cow_hpack:decode(RespHeadersBlock), - {_, <<"400">>} = lists:keyfind(<<":status">>, 1, RespHeaders), + {_, <<"501">>} = lists:keyfind(<<":status">>, 1, RespHeaders), ok. reject_missing_pseudo_header_scheme(Config) -> @@ -293,7 +296,7 @@ reject_missing_pseudo_header_protocol(Config) -> %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1. {ok, Socket, Settings} = do_handshake(Config), #{enable_connect_protocol := true} = Settings, - %% Send an extended CONNECT request without a :scheme pseudo-header. + %% Send an extended CONNECT request without a :protocol pseudo-header. 
{ReqHeadersBlock, _} = cow_hpack:encode([ {<<":method">>, <<"CONNECT">>}, {<<":scheme">>, <<"http">>}, @@ -317,7 +320,7 @@ reject_connection_header(Config) -> %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1. {ok, Socket, Settings} = do_handshake(Config), #{enable_connect_protocol := true} = Settings, - %% Send an extended CONNECT request without a :scheme pseudo-header. + %% Send an extended CONNECT request with a connection header. {ReqHeadersBlock, _} = cow_hpack:encode([ {<<":method">>, <<"CONNECT">>}, {<<":protocol">>, <<"websocket">>}, @@ -339,7 +342,7 @@ reject_upgrade_header(Config) -> %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1. {ok, Socket, Settings} = do_handshake(Config), #{enable_connect_protocol := true} = Settings, - %% Send an extended CONNECT request without a :scheme pseudo-header. + %% Send an extended CONNECT request with a upgrade header. {ReqHeadersBlock, _} = cow_hpack:encode([ {<<":method">>, <<"CONNECT">>}, {<<":protocol">>, <<"websocket">>}, diff --git a/test/rfc9114_SUITE.erl b/test/rfc9114_SUITE.erl new file mode 100644 index 0000000..a03b493 --- /dev/null +++ b/test/rfc9114_SUITE.erl @@ -0,0 +1,2426 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(rfc9114_SUITE). +-compile(export_all). +-compile(nowarn_export_all). + +-import(ct_helper, [config/2]). +-import(ct_helper, [doc/1]). + +-ifdef(COWBOY_QUICER). + +-include_lib("quicer/include/quicer.hrl"). + +all() -> + [{group, h3}]. + +groups() -> + %% @todo Enable parallel tests but for this issues in the + %% QUIC accept loop need to be figured out (can't connect + %% concurrently somehow, no backlog?). + [{h3, [], ct_helper:all(?MODULE)}]. + +init_per_group(Name = h3, Config) -> + cowboy_test:init_http3(Name, #{ + env => #{dispatch => cowboy_router:compile(init_routes(Config))} + }, Config). + +end_per_group(Name, _) -> + cowboy_test:stop_group(Name). + +init_routes(_) -> [ + {"localhost", [ + {"/", hello_h, []}, + {"/echo/:key", echo_h, []} + ]} +]. + +%% Starting HTTP/3 for "https" URIs. + +alpn(Config) -> + doc("Successful ALPN negotiation. (RFC9114 3.1)"), + {ok, Conn} = quicer:connect("localhost", config(port, Config), + #{alpn => ["h3"], verify => none}, 5000), + {ok, <<"h3">>} = quicer:negotiated_protocol(Conn), + %% To make sure the connection is fully established we wait + %% to receive the SETTINGS frame on the control stream. + {ok, _ControlRef, _Settings} = do_wait_settings(Conn), + ok. + +alpn_error(Config) -> + doc("Failed ALPN negotiation using the 'h2' token. (RFC9114 3.1)"), + {error, transport_down, #{status := alpn_neg_failure}} + = quicer:connect("localhost", config(port, Config), + #{alpn => ["h2"], verify => none}, 5000), + ok. + +%% @todo 3.2. 
Connection Establishment +%% After the QUIC connection is established, a SETTINGS frame MUST be sent by each endpoint as the initial frame of their respective HTTP control stream. + +%% @todo 3.3. Connection Reuse +%% Servers are encouraged to maintain open HTTP/3 connections for as long as +%possible but are permitted to terminate idle connections if necessary. When +%either endpoint chooses to close the HTTP/3 connection, the terminating +%endpoint SHOULD first send a GOAWAY frame (Section 5.2) so that both endpoints +%can reliably determine whether previously sent frames have been processed and +%gracefully complete or terminate any necessary remaining tasks. + +%% Frame format. + +req_stream(Config) -> + doc("Complete lifecycle of a request stream. (RFC9114 4.1)"), + {ok, Conn} = quicer:connect("localhost", config(port, Config), + #{alpn => ["h3"], verify => none}, 5000), + %% To make sure the connection is fully established we wait + %% to receive the SETTINGS frame on the control stream. + {ok, ControlRef, _Settings} = do_wait_settings(Conn), + %% Send a request on a request stream. + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedRequest)), + EncodedRequest + ], ?QUIC_SEND_FLAG_FIN), + %% Receive the response. + {ok, Data} = do_receive_data(StreamRef), + {HLenEnc, HLenBits} = do_guess_int_encoding(Data), + << + 1, %% HEADERS frame. + HLenEnc:2, HLen:HLenBits, + EncodedResponse:HLen/bytes, + Rest/bits + >> = Data, + {ok, DecodedResponse, _DecData, _DecSt} + = cow_qpack:decode_field_section(EncodedResponse, 0, cow_qpack:init(decoder)), + #{ + <<":status">> := <<"200">>, + <<"content-length">> := BodyLen + } = maps:from_list(DecodedResponse), + {DLenEnc, DLenBits} = do_guess_int_encoding(Rest), + << + 0, %% DATA frame. + DLenEnc:2, DLen:DLenBits, + Body:DLen/bytes + >> = Rest, + <<"Hello world!">> = Body, + BodyLen = integer_to_binary(byte_size(Body)), + ok = do_wait_peer_send_shutdown(StreamRef), + ok = do_wait_stream_closed(StreamRef). + +%% @todo Same test as above but with content-length unset? + +req_stream_two_requests(Config) -> + doc("Receipt of multiple requests on a single stream must " + "be rejected with an H3_MESSAGE_ERROR stream error. " + "(RFC9114 4.1, RFC9114 4.1.2)"), + {ok, Conn} = quicer:connect("localhost", config(port, Config), + #{alpn => ["h3"], verify => none}, 5000), + %% To make sure the connection is fully established we wait + %% to receive the SETTINGS frame on the control stream. + {ok, ControlRef, _Settings} = do_wait_settings(Conn), + %% Send two requests on a request stream. 
+ {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedRequest1, _EncData1, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedRequest2, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedRequest1)), + EncodedRequest1, + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedRequest2)), + EncodedRequest2 + ]), + %% The stream should have been aborted. + #{reason := h3_message_error} = do_wait_stream_aborted(StreamRef), + ok. + +headers_then_trailers(Config) -> + doc("Receipt of HEADERS followed by trailer HEADERS must be accepted. (RFC9114 4.1)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers)), + EncodedTrailers + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +headers_then_data_then_trailers(Config) -> + doc("Receipt of HEADERS followed by DATA followed by trailer HEADERS " + "must be accepted. (RFC9114 4.1)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello server!">>, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers)), + EncodedTrailers + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +data_then_headers(Config) -> + doc("Receipt of DATA before HEADERS must be rejected " + "with an H3_FRAME_UNEXPECTED connection error. 
(RFC9114 4.1)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello server!">>, + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders + ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +headers_then_trailers_then_data(Config) -> + doc("Receipt of DATA after trailer HEADERS must be rejected " + "with an H3_FRAME_UNEXPECTED connection error. (RFC9114 4.1)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers)), + EncodedTrailers, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello server!">> + ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +headers_then_data_then_trailers_then_data(Config) -> + doc("Receipt of DATA after trailer HEADERS must be rejected " + "with an H3_FRAME_UNEXPECTED connection error. (RFC9114 4.1)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello server!">>, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers)), + EncodedTrailers, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello server!">> + ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +headers_then_data_then_trailers_then_trailers(Config) -> + doc("Receipt of HEADERS after trailer HEADERS must be rejected " + "with an H3_FRAME_UNEXPECTED connection error. 
(RFC9114 4.1)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers1, _EncData2, EncSt1} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt0), + {ok, EncodedTrailers2, _EncData3, _EncSt} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt1), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello server!">>, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers1)), + EncodedTrailers1, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers2)), + EncodedTrailers2 + ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +unknown_then_headers(Config) -> + doc("Receipt of unknown frame followed by HEADERS " + "must be accepted. (RFC9114 4.1, RFC9114 9)"), + unknown_then_headers(Config, do_unknown_frame_type(), + rand:bytes(rand:uniform(4096))). + +unknown_then_headers(Config, Type, Bytes) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + cow_http3:encode_int(Type), %% Unknown frame. + cow_http3:encode_int(iolist_size(Bytes)), + Bytes, + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +headers_then_unknown(Config) -> + doc("Receipt of HEADERS followed by unknown frame " + "must be accepted. (RFC9114 4.1, RFC9114 9)"), + headers_then_unknown(Config, do_unknown_frame_type(), + rand:bytes(rand:uniform(4096))). + +headers_then_unknown(Config, Type, Bytes) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + cow_http3:encode_int(Type), %% Unknown frame. + cow_http3:encode_int(iolist_size(Bytes)), + Bytes + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +headers_then_data_then_unknown(Config) -> + doc("Receipt of HEADERS followed by DATA followed by unknown frame " + "must be accepted. 
(RFC9114 4.1, RFC9114 9)"), + headers_then_data_then_unknown(Config, do_unknown_frame_type(), + rand:bytes(rand:uniform(4096))). + +headers_then_data_then_unknown(Config, Type, Bytes) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello server!">>, + cow_http3:encode_int(Type), %% Unknown frame. + cow_http3:encode_int(iolist_size(Bytes)), + Bytes + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +headers_then_trailers_then_unknown(Config) -> + doc("Receipt of HEADERS followed by trailer HEADERS followed by unknown frame " + "must be accepted. (RFC9114 4.1, RFC9114 9)"), + headers_then_trailers_then_unknown(Config, do_unknown_frame_type(), + rand:bytes(rand:uniform(4096))). + +headers_then_trailers_then_unknown(Config, Type, Bytes) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers)), + EncodedTrailers, + cow_http3:encode_int(Type), %% Unknown frame. + cow_http3:encode_int(iolist_size(Bytes)), + Bytes + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +headers_then_data_then_unknown_then_trailers(Config) -> + doc("Receipt of HEADERS followed by DATA followed by " + "unknown frame followed by trailer HEADERS " + "must be accepted. (RFC9114 4.1, RFC9114 9)"), + headers_then_data_then_unknown_then_trailers(Config, + do_unknown_frame_type(), rand:bytes(rand:uniform(4096))). + +headers_then_data_then_unknown_then_trailers(Config, Type, Bytes) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello server!">>, + cow_http3:encode_int(Type), %% Unknown frame. 
+ cow_http3:encode_int(iolist_size(Bytes)), + Bytes, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers)), + EncodedTrailers + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +headers_then_data_then_unknown_then_data(Config) -> + doc("Receipt of HEADERS followed by DATA followed by " + "unknown frame followed by DATA " + "must be accepted. (RFC9114 4.1, RFC9114 9)"), + headers_then_data_then_unknown_then_data(Config, + do_unknown_frame_type(), rand:bytes(rand:uniform(4096))). + +headers_then_data_then_unknown_then_data(Config, Type, Bytes) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(6), + <<"Hello ">>, + cow_http3:encode_int(Type), %% Unknown frame. + cow_http3:encode_int(iolist_size(Bytes)), + Bytes, + <<0>>, %% DATA frame. + cow_http3:encode_int(7), + <<"server!">> + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +headers_then_data_then_trailers_then_unknown(Config) -> + doc("Receipt of HEADERS followed by DATA followed by " + "trailer HEADERS followed by unknown frame " + "must be accepted. (RFC9114 4.1, RFC9114 9)"), + headers_then_data_then_trailers_then_unknown(Config, + do_unknown_frame_type(), rand:bytes(rand:uniform(4096))). + +headers_then_data_then_trailers_then_unknown(Config, Type, Bytes) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello server!">>, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers)), + EncodedTrailers, + cow_http3:encode_int(Type), %% Unknown frame. + cow_http3:encode_int(iolist_size(Bytes)), + Bytes + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +do_unknown_frame_type() -> + Type = rand:uniform(4611686018427387904) - 1, + %% Retry if we get a value that's specified. + case lists:member(Type, [ + 16#0, 16#1, 16#3, 16#4, 16#5, 16#7, 16#d, %% HTTP/3 core frame types. + 16#2, 16#6, 16#8, 16#9 %% HTTP/3 reserved frame types that must be rejected. + ]) of + true -> do_unknown_frame_type(); + false -> Type + end. 
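+
+%% The reserved_then_* tests below rely on a do_reserved_type/0 helper
+%% that is defined elsewhere in this suite and not visible in this hunk.
+%% As a rough sketch only: a reserved ("GREASE") frame type of the form
+%% 0x1f * N + 0x21 (RFC9114 7.2.8) could be generated as below. The
+%% upper bound merely keeps the result within the 62-bit varint range
+%% and is an assumption, not necessarily what the suite uses.
+%%
+%%   do_reserved_type() ->
+%%       16#1f * (rand:uniform(148764065110560900) - 1) + 16#21.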
+ +reserved_then_headers(Config) -> + doc("Receipt of reserved frame followed by HEADERS " + "must be accepted when the reserved frame type is " + "of the format 0x1f * N + 0x21. (RFC9114 4.1, RFC9114 7.2.8)"), + unknown_then_headers(Config, do_reserved_type(), + rand:bytes(rand:uniform(4096))). + +headers_then_reserved(Config) -> + doc("Receipt of HEADERS followed by reserved frame " + "must be accepted when the reserved frame type is " + "of the format 0x1f * N + 0x21. (RFC9114 4.1, RFC9114 7.2.8)"), + headers_then_unknown(Config, do_reserved_type(), + rand:bytes(rand:uniform(4096))). + +headers_then_data_then_reserved(Config) -> + doc("Receipt of HEADERS followed by DATA followed by reserved frame " + "must be accepted when the reserved frame type is " + "of the format 0x1f * N + 0x21. (RFC9114 4.1, RFC9114 7.2.8)"), + headers_then_data_then_unknown(Config, do_reserved_type(), + rand:bytes(rand:uniform(4096))). + +headers_then_trailers_then_reserved(Config) -> + doc("Receipt of HEADERS followed by trailer HEADERS followed by reserved frame " + "must be accepted when the reserved frame type is " + "of the format 0x1f * N + 0x21. (RFC9114 4.1, RFC9114 7.2.8)"), + headers_then_trailers_then_unknown(Config, do_reserved_type(), + rand:bytes(rand:uniform(4096))). + +headers_then_data_then_reserved_then_trailers(Config) -> + doc("Receipt of HEADERS followed by DATA followed by " + "reserved frame followed by trailer HEADERS " + "must be accepted when the reserved frame type is " + "of the format 0x1f * N + 0x21. (RFC9114 4.1, RFC9114 7.2.8)"), + headers_then_data_then_unknown_then_trailers(Config, + do_reserved_type(), rand:bytes(rand:uniform(4096))). + +headers_then_data_then_reserved_then_data(Config) -> + doc("Receipt of HEADERS followed by DATA followed by " + "reserved frame followed by DATA " + "must be accepted when the reserved frame type is " + "of the format 0x1f * N + 0x21. (RFC9114 4.1, RFC9114 7.2.8)"), + headers_then_data_then_unknown_then_data(Config, + do_reserved_type(), rand:bytes(rand:uniform(4096))). + +headers_then_data_then_trailers_then_reserved(Config) -> + doc("Receipt of HEADERS followed by DATA followed by " + "trailer HEADERS followed by reserved frame " + "must be accepted when the reserved frame type is " + "of the format 0x1f * N + 0x21. (RFC9114 4.1, RFC9114 7.2.8)"), + headers_then_data_then_trailers_then_unknown(Config, + do_reserved_type(), rand:bytes(rand:uniform(4096))). + +reject_transfer_encoding_header_with_body(Config) -> + doc("Requests containing a transfer-encoding header must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.1, RFC9114 4.1.2, RFC9114 4.2)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, _EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"transfer-encoding">>, <<"chunked">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(24), + <<"13\r\nHello server!\r\n0\r\n\r\n">> + ]), + %% The stream should have been aborted. + #{reason := h3_message_error} = do_wait_stream_aborted(StreamRef), + ok. + +%% 4. Expressing HTTP Semantics in HTTP/3 +%% 4.1. 
HTTP Message Framing + +%% An HTTP request/response exchange fully consumes a client-initiated +%bidirectional QUIC stream. After sending a request, a client MUST close the +%stream for sending. Unless using the CONNECT method (see Section 4.4), clients +%MUST NOT make stream closure dependent on receiving a response to their +%request. After sending a final response, the server MUST close the stream for +%sending. At this point, the QUIC stream is fully closed. +%% @todo What to do with clients that DON'T close the stream +%% for sending after the request is sent? + +%% If a client-initiated stream terminates without enough of the HTTP message +%to provide a complete response, the server SHOULD abort its response stream +%with the error code H3_REQUEST_INCOMPLETE. +%% @todo difficult!! + +%% When the server does not need to receive the remainder of the request, it +%MAY abort reading the request stream, send a complete response, and cleanly +%close the sending part of the stream. The error code H3_NO_ERROR SHOULD be +%used when requesting that the client stop sending on the request stream. +%% @todo read_body related; h2 has this behavior but there is no corresponding test + +%% 4.1.1. Request Cancellation and Rejection + +%% When possible, it is RECOMMENDED that servers send an HTTP response with an +%appropriate status code rather than cancelling a request it has already begun +%processing. + +%% Implementations SHOULD cancel requests by abruptly terminating any +%directions of a stream that are still open. To do so, an implementation resets +%the sending parts of streams and aborts reading on the receiving parts of +%streams; see Section 2.4 of [QUIC-TRANSPORT]. + +%% When the server cancels a request without performing any application +%processing, the request is considered "rejected". The server SHOULD abort its +%response stream with the error code H3_REQUEST_REJECTED. In this context, +%"processed" means that some data from the stream was passed to some higher +%layer of software that might have taken some action as a result. The client +%can treat requests rejected by the server as though they had never been sent +%at all, thereby allowing them to be retried later. + +%% Servers MUST NOT use the H3_REQUEST_REJECTED error code for requests that +%were partially or fully processed. When a server abandons a response after +%partial processing, it SHOULD abort its response stream with the error code +%H3_REQUEST_CANCELLED. +%% @todo + +%% Client SHOULD use the error code H3_REQUEST_CANCELLED to cancel requests. +%Upon receipt of this error code, a server MAY abruptly terminate the response +%using the error code H3_REQUEST_REJECTED if no processing was performed. +%Clients MUST NOT use the H3_REQUEST_REJECTED error code, except when a server +%has requested closure of the request stream with this error code. +%% @todo + +%4.1.2. Malformed Requests and Responses +%A malformed request or response is one that is an otherwise valid sequence of +%frames but is invalid due to: +% +%the presence of prohibited fields or pseudo-header fields, +%% @todo reject_response_pseudo_headers +%% @todo reject_unknown_pseudo_headers +%% @todo reject_pseudo_headers_in_trailers + +%the absence of mandatory pseudo-header fields, +%invalid values for pseudo-header fields, +%pseudo-header fields after fields, +%% @todo reject_pseudo_headers_after_regular_headers + +%an invalid sequence of HTTP messages, +%the inclusion of invalid characters in field names or values. 
+% +%A request or response that is defined as having content when it contains a +%Content-Length header field (Section 8.6 of [HTTP]) is malformed if the value +%of the Content-Length header field does not equal the sum of the DATA frame +%lengths received. A response that is defined as never having content, even +%when a Content-Length is present, can have a non-zero Content-Length header +%field even though no content is included in DATA frames. +% +%For malformed requests, a server MAY send an HTTP response indicating the +%error prior to closing or resetting the stream. +%% @todo All the malformed tests + +headers_reject_uppercase_header_name(Config) -> + doc("Requests containing uppercase header names must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.2, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<"I-AM-GIGANTIC">>, <<"How's the weather up there?">>} + ). + +%% 4.2. HTTP Fields +%% An endpoint MUST NOT generate an HTTP/3 field section containing +%connection-specific fields; any message containing connection-specific fields +%MUST be treated as malformed. + +reject_connection_header(Config) -> + doc("Requests containing a connection header must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.2, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<"connection">>, <<"close">>} + ). + +reject_keep_alive_header(Config) -> + doc("Requests containing a keep-alive header must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.2, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<"keep-alive">>, <<"timeout=5, max=1000">>} + ). + +reject_proxy_authenticate_header(Config) -> + doc("Requests containing a proxy-authenticate header must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.2, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<"proxy-authenticate">>, <<"Basic">>} + ). + +reject_proxy_authorization_header(Config) -> + doc("Requests containing a proxy-authorization header must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.2, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<"proxy-authorization">>, <<"Basic YWxhZGRpbjpvcGVuc2VzYW1l">>} + ). + +reject_transfer_encoding_header(Config) -> + doc("Requests containing a transfer-encoding header must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.2, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<"transfer-encoding">>, <<"chunked">>} + ). + +reject_upgrade_header(Config) -> + doc("Requests containing an upgrade header must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.2, RFC9114 4.5, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<"upgrade">>, <<"websocket">>} + ). + +accept_te_header_value_trailers(Config) -> + doc("Requests containing a TE header with a value of \"trailers\" " + "must be accepted. (RFC9114 4.2)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>}, + {<<"te">>, <<"trailers">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<"content-type">>, <<"text/plain">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. 
+ cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers)), + EncodedTrailers + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +reject_te_header_other_values(Config) -> + doc("Requests containing a TE header with a value other than \"trailers\" must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.2, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<"te">>, <<"trailers, deflate;q=0.5">>} + ). + +%% @todo response_dont_send_header_in_connection +%% @todo response_dont_send_connection_header +%% @todo response_dont_send_keep_alive_header +%% @todo response_dont_send_proxy_connection_header +%% @todo response_dont_send_transfer_encoding_header +%% @todo response_dont_send_upgrade_header + +%% 4.2.1. Field Compression +%% To allow for better compression efficiency, the Cookie header field +%([COOKIES]) MAY be split into separate field lines, each with one or more +%cookie-pairs, before compression. If a decompressed field section contains +%multiple cookie field lines, these MUST be concatenated into a single byte +%string using the two-byte delimiter of "; " (ASCII 0x3b, 0x20) before being +%passed into a context other than HTTP/2 or HTTP/3, such as an HTTP/1.1 +%connection, or a generic HTTP server application. + +%% 4.2.2. Header Size Constraints +%% An HTTP/3 implementation MAY impose a limit on the maximum size of the +%message header it will accept on an individual HTTP message. A server that +%receives a larger header section than it is willing to handle can send an HTTP +%431 (Request Header Fields Too Large) status code ([RFC6585]). The size of a +%field list is calculated based on the uncompressed size of fields, including +%the length of the name and value in bytes plus an overhead of 32 bytes for +%each field. +%% If an implementation wishes to advise its peer of this limit, it can be +%conveyed as a number of bytes in the SETTINGS_MAX_FIELD_SECTION_SIZE +%parameter. + +reject_unknown_pseudo_headers(Config) -> + doc("Requests containing unknown pseudo-headers must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<":upgrade">>, <<"websocket">>} + ). + +reject_response_pseudo_headers(Config) -> + doc("Requests containing response pseudo-headers must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3, RFC9114 4.1.2)"), + do_reject_malformed_header(Config, + {<<":status">>, <<"200">>} + ). + +reject_pseudo_headers_in_trailers(Config) -> + doc("Requests containing pseudo-headers in trailers must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3, RFC9114 4.1.2)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"trailer">>, <<"x-checksum">>} + ], 0, cow_qpack:init(encoder)), + {ok, EncodedTrailers, _EncData2, _EncSt} = cow_qpack:encode_field_section([ + {<<"x-checksum">>, <<"md5:4cc909a007407f3706399b6496babec3">>}, + {<<":path">>, <<"/">>} + ], 0, EncSt0), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. 
+ cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(10000), + <<0:10000/unit:8>>, + <<1>>, %% HEADERS frame for trailers. + cow_http3:encode_int(iolist_size(EncodedTrailers)), + EncodedTrailers + ]), + %% The stream should have been aborted. + #{reason := h3_message_error} = do_wait_stream_aborted(StreamRef), + ok. + +reject_pseudo_headers_after_regular_headers(Config) -> + doc("Requests containing pseudo-headers after regular headers must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<"content-length">>, <<"0">>}, + {<<":path">>, <<"/">>} + ]). + +reject_userinfo(Config) -> + doc("An authority containing a userinfo component must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"user@localhost">>}, + {<<":path">>, <<"/">>} + ]). + +%% To ensure that the HTTP/1.1 request line can be reproduced accurately, this +%% pseudo-header field (:authority) MUST be omitted when translating from an +%% HTTP/1.1 request that has a request target in a method-specific form; +%% see Section 7.1 of [HTTP]. + +reject_empty_path(Config) -> + doc("A request containing an empty path component must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<>>} + ]). + +reject_missing_pseudo_header_method(Config) -> + doc("A request without a method component must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>} + ]). + +reject_many_pseudo_header_method(Config) -> + doc("A request containing more than one method component must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>} + ]). + +reject_missing_pseudo_header_scheme(Config) -> + doc("A request without a scheme component must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>} + ]). + +reject_many_pseudo_header_scheme(Config) -> + doc("A request containing more than one scheme component must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>} + ]). + +reject_missing_pseudo_header_authority(Config) -> + doc("A request without an authority or host component must be rejected " + "with an H3_MESSAGE_ERROR stream error. 
(RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>} + ]). + +accept_host_header_on_missing_pseudo_header_authority(Config) -> + doc("A request without an authority but with a host header must be accepted. " + "(RFC9114 4.3.1)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, _EncSt0} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":path">>, <<"/">>}, + {<<"host">>, <<"localhost">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +%% @todo +%% If the :scheme pseudo-header field identifies a scheme that has a mandatory +%% authority component (including "http" and "https"), the request MUST contain +%% either an :authority pseudo-header field or a Host header field. +%% - If both fields are present, they MUST NOT be empty. +%% - If both fields are present, they MUST contain the same value. + +reject_many_pseudo_header_authority(Config) -> + doc("A request containing more than one authority component must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>} + ]). + +reject_missing_pseudo_header_path(Config) -> + doc("A request without a path component must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>} + ]). + +reject_many_pseudo_header_path(Config) -> + doc("A request containing more than one path component must be rejected " + "with an H3_MESSAGE_ERROR stream error. (RFC9114 4.3.1, RFC9114 4.1.2)"), + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<":path">>, <<"/">>} + ]). + +do_reject_malformed_header(Config, Header) -> + do_reject_malformed_headers(Config, [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + Header + ]). + +do_reject_malformed_headers(Config, Headers) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData1, _EncSt0} + = cow_qpack:encode_field_section(Headers, 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders + ]), + %% The stream should have been aborted. + #{reason := h3_message_error} = do_wait_stream_aborted(StreamRef), + ok. + +%% For responses, a single ":status" pseudo-header field is defined that +%% carries the HTTP status code; see Section 15 of [HTTP]. This pseudo-header +%% field MUST be included in all responses; otherwise, the response is malformed +%% (see Section 4.1.2). + +%% @todo Implement CONNECT. (RFC9114 4.4. 
The CONNECT Method) + +%% @todo Maybe block the sending of 101 responses? (RFC9114 4.5. HTTP Upgrade) - also HTTP/2. + +%% @todo Implement server push (RFC9114 4.6. Server Push) + +%% @todo - need a way to list connections +%% 5.2. Connection Shutdown +%% Endpoints initiate the graceful shutdown of an HTTP/3 connection by sending +%% a GOAWAY frame. The GOAWAY frame contains an identifier that indicates to the +%% receiver the range of requests or pushes that were or might be processed in +%% this connection. The server sends a client-initiated bidirectional stream ID; +%% the client sends a push ID. Requests or pushes with the indicated identifier +%% or greater are rejected (Section 4.1.1) by the sender of the GOAWAY. This +%% identifier MAY be zero if no requests or pushes were processed. + +%% @todo +%% Upon sending a GOAWAY frame, the endpoint SHOULD explicitly cancel (see +%% Sections 4.1.1 and 7.2.3) any requests or pushes that have identifiers greater +%% than or equal to the one indicated, in order to clean up transport state for +%% the affected streams. The endpoint SHOULD continue to do so as more requests +%% or pushes arrive. + +%% @todo +%% Endpoints MUST NOT initiate new requests or promise new pushes on the +%% connection after receipt of a GOAWAY frame from the peer. + +%% @todo +%% Requests on stream IDs less than the stream ID in a GOAWAY frame from the +%% server might have been processed; their status cannot be known until a +%% response is received, the stream is reset individually, another GOAWAY is +%% received with a lower stream ID than that of the request in question, or the +%% connection terminates. + +%% @todo +%% Servers MAY reject individual requests on streams below the indicated ID if +%% these requests were not processed. + +%% @todo +%% If a server receives a GOAWAY frame after having promised pushes with a push +%% ID greater than or equal to the identifier contained in the GOAWAY frame, +%% those pushes will not be accepted. + +%% @todo +%% Servers SHOULD send a GOAWAY frame when the closing of a connection is known +%% in advance, even if the advance notice is small, so that the remote peer can +%% know whether or not a request has been partially processed. + +%% @todo +%% An endpoint MAY send multiple GOAWAY frames indicating different +%% identifiers, but the identifier in each frame MUST NOT be greater than the +%% identifier in any previous frame, since clients might already have retried +%% unprocessed requests on another HTTP connection. Receiving a GOAWAY containing +%% a larger identifier than previously received MUST be treated as a connection +%% error of type H3_ID_ERROR. + +%% @todo +%% An endpoint that is attempting to gracefully shut down a connection can send +%% a GOAWAY frame with a value set to the maximum possible value (2^62-4 for +%% servers, 2^62-1 for clients). + +%% @todo +%% Even when a GOAWAY indicates that a given request or push will not be +%% processed or accepted upon receipt, the underlying transport resources still +%% exist. The endpoint that initiated these requests can cancel them to clean up +%% transport state. + +%% @todo +%% Once all accepted requests and pushes have been processed, the endpoint can +%% permit the connection to become idle, or it MAY initiate an immediate closure +%% of the connection. An endpoint that completes a graceful shutdown SHOULD use +%% the H3_NO_ERROR error code when closing the connection. 
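+
+%% For reference, the GOAWAY frames exercised further down in this suite
+%% are written by hand on the client's control stream, directly after the
+%% SETTINGS frame. A graceful-shutdown GOAWAY carrying the maximum client
+%% value (2^62-1, a push ID) mentioned above could be sent along these
+%% lines; this is only an illustrative sketch, not a test the suite runs:
+%%
+%%   {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}),
+%%   {ok, ControlRef} = quicer:start_stream(Conn,
+%%       #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}),
+%%   MaxPushID = cow_http3:encode_int(16#3fffffffffffffff), %% 2^62-1.
+%%   {ok, _} = quicer:send(ControlRef, [
+%%       <<0>>, %% CONTROL stream.
+%%       SettingsBin,
+%%       <<7>>, %% GOAWAY frame.
+%%       cow_http3:encode_int(iolist_size(MaxPushID)),
+%%       MaxPushID
+%%   ]).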
+ +%% @todo +%% If a client has consumed all available bidirectional stream IDs with +%% requests, the server need not send a GOAWAY frame, since the client is unable +%% to make further requests. @todo OK that one's some weird stuff lol + +%% @todo +%% 5.3. Immediate Application Closure +%% Before closing the connection, a GOAWAY frame MAY be sent to allow the +%% client to retry some requests. Including the GOAWAY frame in the same packet +%% as the QUIC CONNECTION_CLOSE frame improves the chances of the frame being +%% received by clients. + +bidi_allow_at_least_a_hundred(Config) -> + doc("Endpoints must allow the peer to create at least " + "one hundred bidirectional streams. (RFC9114 6.1)"), + #{conn := Conn} = do_connect(Config), + receive + {quic, streams_available, Conn, #{bidi_streams := NumStreams}} -> + true = NumStreams >= 100, + ok + after 5000 -> + error(timeout) + end. + +unidi_allow_at_least_three(Config) -> + doc("Endpoints must allow the peer to create at least " + "three unidirectional streams. (RFC9114 6.2)"), + #{conn := Conn} = do_connect(Config), + %% Confirm that the server advertised support for at least 3 unidi streams. + receive + {quic, streams_available, Conn, #{unidi_streams := NumStreams}} -> + true = NumStreams >= 3, + ok + after 5000 -> + error(timeout) + end, + %% Confirm that we can create the unidi streams. + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(ControlRef, [<<0>>, SettingsBin]), + {ok, EncoderRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(EncoderRef, <<2>>), + {ok, DecoderRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(DecoderRef, <<3>>), + %% Streams shouldn't get closed. + fun Loop() -> + receive + %% We don't care about these messages. + {quic, dgram_state_changed, Conn, _} -> + Loop(); + {quic, peer_needs_streams, Conn, _} -> + Loop(); + %% Any other message we do care about. + Msg -> + error(Msg) + after 1000 -> + ok + end + end(). + +unidi_create_critical_first(Config) -> + doc("Endpoints should create the HTTP control stream as well as " + "the QPACK encoder and decoder streams first. (RFC9114 6.2)"), + %% The control stream is accepted in the do_connect/1 function. + #{conn := Conn} = do_connect(Config, #{peer_unidi_stream_count => 3}), + Unidi1 = do_accept_qpack_stream(Conn), + Unidi2 = do_accept_qpack_stream(Conn), + case {Unidi1, Unidi2} of + {{encoder, _}, {decoder, _}} -> + ok; + {{decoder, _}, {encoder, _}} -> + ok + end. + +do_accept_qpack_stream(Conn) -> + receive + {quic, new_stream, StreamRef, #{flags := Flags}} -> + ok = quicer:setopt(StreamRef, active, true), + true = quicer:is_unidirectional(Flags), + receive {quic, <<Type>>, StreamRef, _} -> + {case Type of + 2 -> encoder; + 3 -> decoder + end, StreamRef} + after 5000 -> + error(timeout) + end + after 5000 -> + error(timeout) + end. + +%% @todo We should also confirm that there's at least 1,024 bytes of +%% flow-control credit for each unidi stream the server creates. (How?) +%% It can be set via stream_recv_window_default in quicer. + +unidi_abort_unknown_type(Config) -> + doc("Receipt of an unknown stream type must be aborted " + "with an H3_STREAM_CREATION_ERROR stream error. (RFC9114 6.2, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + %% Create an unknown unidirectional stream. 
+ {ok, StreamRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(StreamRef, [ + cow_http3:encode_int(1 + do_reserved_type()), + rand:bytes(rand:uniform(4096)) + ]), + %% The stream should have been aborted. + #{reason := h3_stream_creation_error} = do_wait_stream_aborted(StreamRef), + ok. + +unidi_abort_reserved_type(Config) -> + doc("Receipt of a reserved stream type must be aborted " + "with an H3_STREAM_CREATION_ERROR stream error. " + "(RFC9114 6.2, RFC9114 6.2.3, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + %% Create a reserved unidirectional stream. + {ok, StreamRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(StreamRef, [ + cow_http3:encode_int(do_reserved_type()), + rand:bytes(rand:uniform(4096)) + ]), + %% The stream should have been aborted. + #{reason := h3_stream_creation_error} = do_wait_stream_aborted(StreamRef), + ok. + +%% As certain stream types can affect connection state, a recipient SHOULD NOT +%% discard data from incoming unidirectional streams prior to reading the stream type. + +%% Implementations MAY send stream types before knowing whether the peer +%supports them. However, stream types that could modify the state or semantics +%of existing protocol components, including QPACK or other extensions, MUST NOT +%be sent until the peer is known to support them. +%% @todo It may make sense for Cowboy to delay the creation of unidi streams +%% a little in order to save resources. We could create them when the +%% client does as well, or something similar. + +%% A receiver MUST tolerate unidirectional streams being closed or reset prior +%% to the reception of the unidirectional stream header. + +%% Each side MUST initiate a single control stream at the beginning of the +%% connection and send its SETTINGS frame as the first frame on this stream. +%% @todo What to do when the client never opens a control stream? +%% @todo Similarly, a stream could be opened but with no data being sent. +%% @todo Similarly, a control stream could be opened with no SETTINGS frame sent. + +control_reject_first_frame_data(Config) -> + doc("The first frame on a control stream must be a SETTINGS frame " + "or the connection must be closed with an H3_MISSING_SETTINGS " + "connection error. (RFC9114 6.2.1, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<0>>, %% DATA frame. + cow_http3:encode_int(12), + <<"Hello world!">> + ]), + %% The connection should have been closed. + #{reason := h3_missing_settings} = do_wait_connection_closed(Conn), + ok. + +control_reject_first_frame_headers(Config) -> + doc("The first frame on a control stream must be a SETTINGS frame " + "or the connection must be closed with an H3_MISSING_SETTINGS " + "connection error. (RFC9114 6.2.1, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<1>>, %% HEADERS frame. 
+ cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders + ]), + %% The connection should have been closed. + #{reason := h3_missing_settings} = do_wait_connection_closed(Conn), + ok. + +control_reject_first_frame_cancel_push(Config) -> + doc("The first frame on a control stream must be a SETTINGS frame " + "or the connection must be closed with an H3_MISSING_SETTINGS " + "connection error. (RFC9114 6.2.1, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<3>>, %% CANCEL_PUSH frame. + cow_http3:encode_int(1), + cow_http3:encode_int(0) + ]), + %% The connection should have been closed. + #{reason := h3_missing_settings} = do_wait_connection_closed(Conn), + ok. + +control_accept_first_frame_settings(Config) -> + doc("The first frame on a control stream " + "must be a SETTINGS frame. (RFC9114 6.2.1, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin + ]), + %% The connection should remain up. + receive + {quic, shutdown, Conn, {unknown_quic_status, Code}} -> + Reason = cow_http3:code_to_error(Code), + error(Reason) + after 1000 -> + ok + end. + +control_reject_first_frame_push_promise(Config) -> + doc("The first frame on a control stream must be a SETTINGS frame " + "or the connection must be closed with an H3_MISSING_SETTINGS " + "connection error. (RFC9114 6.2.1, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<5>>, %% PUSH_PROMISE frame. + cow_http3:encode_int(iolist_size(EncodedHeaders) + 1), + cow_http3:encode_int(0), + EncodedHeaders + ]), + %% The connection should have been closed. + #{reason := h3_missing_settings} = do_wait_connection_closed(Conn), + ok. + +control_reject_first_frame_goaway(Config) -> + doc("The first frame on a control stream must be a SETTINGS frame " + "or the connection must be closed with an H3_MISSING_SETTINGS " + "connection error. (RFC9114 6.2.1, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<7>>, %% GOAWAY frame. + cow_http3:encode_int(1), + cow_http3:encode_int(0) + ]), + %% The connection should have been closed. + #{reason := h3_missing_settings} = do_wait_connection_closed(Conn), + ok. + +control_reject_first_frame_max_push_id(Config) -> + doc("The first frame on a control stream must be a SETTINGS frame " + "or the connection must be closed with an H3_MISSING_SETTINGS " + "connection error. 
(RFC9114 6.2.1, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<13>>, %% MAX_PUSH_ID frame. + cow_http3:encode_int(1), + cow_http3:encode_int(0) + ]), + %% The connection should have been closed. + #{reason := h3_missing_settings} = do_wait_connection_closed(Conn), + ok. + +control_reject_first_frame_reserved(Config) -> + doc("The first frame on a control stream must be a SETTINGS frame " + "or the connection must be closed with an H3_MISSING_SETTINGS " + "connection error. (RFC9114 6.2.1, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + Len = rand:uniform(512), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + cow_http3:encode_int(do_reserved_type()), + cow_http3:encode_int(Len), + rand:bytes(Len) + ]), + %% The connection should have been closed. + #{reason := h3_missing_settings} = do_wait_connection_closed(Conn), + ok. + +control_reject_multiple(Config) -> + doc("Endpoints must not create multiple control streams. (RFC9114 6.2.1)"), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + do_critical_reject_multiple(Config, [<<0>>, SettingsBin]). + +do_critical_reject_multiple(Config, HeaderData) -> + #{conn := Conn} = do_connect(Config), + %% Create two critical streams. + {ok, StreamRef1} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(StreamRef1, HeaderData), + {ok, StreamRef2} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(StreamRef2, HeaderData), + %% The connection should have been closed. + #{reason := h3_stream_creation_error} = do_wait_connection_closed(Conn), + ok. + +control_local_closed_abort(Config) -> + doc("Endpoints must not close the control stream. (RFC9114 6.2.1)"), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + do_critical_local_closed_abort(Config, [<<0>>, SettingsBin]). + +do_critical_local_closed_abort(Config, HeaderData) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(StreamRef, HeaderData), + %% Wait a little to make sure the stream data was received before we abort. + timer:sleep(100), + %% Close the critical stream. + quicer:async_shutdown_stream(StreamRef, ?QUIC_STREAM_SHUTDOWN_FLAG_ABORT, 0), + %% The connection should have been closed. + timer:sleep(1000), + #{reason := h3_closed_critical_stream} = do_wait_connection_closed(Conn), + ok. + +control_local_closed_graceful(Config) -> + doc("Endpoints must not close the control stream. (RFC9114 6.2.1)"), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + do_critical_local_closed_graceful(Config, [<<0>>, SettingsBin]). + +do_critical_local_closed_graceful(Config, HeaderData) -> + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, _} = quicer:send(StreamRef, HeaderData), + %% Close the critical stream. + quicer:async_shutdown_stream(StreamRef, ?QUIC_STREAM_SHUTDOWN_FLAG_GRACEFUL, 0), + %% The connection should have been closed. 
+ #{reason := h3_closed_critical_stream} = do_wait_connection_closed(Conn), + ok. + +control_remote_closed_abort(Config) -> + doc("Endpoints must not close the control stream. (RFC9114 6.2.1)"), + #{conn := Conn, control := ControlRef} = do_connect(Config), + %% Close the control stream. + quicer:async_shutdown_stream(ControlRef, ?QUIC_STREAM_SHUTDOWN_FLAG_ABORT, 0), + %% The connection should have been closed. + #{reason := h3_closed_critical_stream} = do_wait_connection_closed(Conn), + ok. + +%% We cannot gracefully shutdown a remote unidi stream; only abort reading. + +%% Because the contents of the control stream are used to manage the behavior +%% of other streams, endpoints SHOULD provide enough flow-control credit to keep +%% the peer's control stream from becoming blocked. + +%% @todo Implement server push (RFC9114 6.2.2 Push Streams) + +data_frame_can_span_multiple_packets(Config) -> + doc("HTTP/3 frames can span multiple packets. (RFC9114 7)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/echo/read_body">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello ">> + ]), + timer:sleep(100), + {ok, _} = quicer:send(StreamRef, [ + <<"server!">> + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello server!">> + } = do_receive_response(StreamRef), + ok. + +headers_frame_can_span_multiple_packets(Config) -> + doc("HTTP/3 frames can span multiple packets. (RFC9114 7)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + Half = iolist_size(EncodedHeaders) div 2, + <<EncodedHeadersPart1:Half/binary, EncodedHeadersPart2/bits>> + = iolist_to_binary(EncodedHeaders), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeadersPart1 + ]), + timer:sleep(100), + {ok, _} = quicer:send(StreamRef, [ + EncodedHeadersPart2 + ]), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +%% @todo Implement server push. cancel_push_frame_can_span_multiple_packets(Config) -> + +settings_frame_can_span_multiple_packets(Config) -> + doc("HTTP/3 frames can span multiple packets. (RFC9114 7)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + <<SettingsPart1:1/binary, SettingsPart2/bits>> = SettingsBin, + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsPart1 + ]), + timer:sleep(100), + {ok, _} = quicer:send(ControlRef, [ + SettingsPart2 + ]), + %% The connection should remain up. 
+ receive + {quic, shutdown, Conn, {unknown_quic_status, Code}} -> + Reason = cow_http3:code_to_error(Code), + error(Reason) + after 1000 -> + ok + end. + +goaway_frame_can_span_multiple_packets(Config) -> + doc("HTTP/3 frames can span multiple packets. (RFC9114 7)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + <<7>>, cow_http3:encode_int(1) %% GOAWAY part 1. + ]), + timer:sleep(100), + {ok, _} = quicer:send(ControlRef, [ + cow_http3:encode_int(0) %% GOAWAY part 2. + ]), + %% The connection should be closed gracefully. + receive + {quic, shutdown, Conn, {unknown_quic_status, Code}} -> + h3_no_error = cow_http3:code_to_error(Code), + ok; + %% @todo Temporarily also accept this message. I am + %% not sure why it happens but it isn't wrong per se. + {quic, shutdown, Conn, success} -> + ok + after 1000 -> + error(timeout) + end. + +max_push_id_frame_can_span_multiple_packets(Config) -> + doc("HTTP/3 frames can span multiple packets. (RFC9114 7)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + <<13>>, cow_http3:encode_int(1) %% MAX_PUSH_ID part 1. + ]), + timer:sleep(100), + {ok, _} = quicer:send(ControlRef, [ + cow_http3:encode_int(0) %% MAX_PUSH_ID part 2. + ]), + %% The connection should remain up. + receive + {quic, shutdown, Conn, {unknown_quic_status, Code}} -> + Reason = cow_http3:code_to_error(Code), + error(Reason) + after 1000 -> + ok + end. + +unknown_frame_can_span_multiple_packets(Config) -> + doc("HTTP/3 frames can span multiple packets. (RFC9114 7)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(StreamRef, [ + cow_http3:encode_int(do_unknown_frame_type()), + cow_http3:encode_int(16383) + ]), + timer:sleep(100), + {ok, _} = quicer:send(StreamRef, rand:bytes(4096)), + timer:sleep(100), + {ok, _} = quicer:send(StreamRef, rand:bytes(4096)), + timer:sleep(100), + {ok, _} = quicer:send(StreamRef, rand:bytes(4096)), + timer:sleep(100), + {ok, _} = quicer:send(StreamRef, rand:bytes(4095)), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders + ], ?QUIC_SEND_FLAG_FIN), + #{ + headers := #{<<":status">> := <<"200">>}, + body := <<"Hello world!">> + } = do_receive_response(StreamRef), + ok. + +%% The DATA and SETTINGS frames can be zero-length therefore +%% they cannot be too short. + +headers_frame_too_short(Config) -> + doc("Frames that terminate before the end of identified fields " + "must be rejected with an H3_FRAME_ERROR connection error. " + "(RFC9114 7.1, RFC9114 10.8)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. 
+ cow_http3:encode_int(0) + ]), + %% The connection should have been closed. + #{reason := h3_frame_error} = do_wait_connection_closed(Conn), + ok. + +%% @todo Implement server push. cancel_push_frame_too_short(Config) -> + +goaway_frame_too_short(Config) -> + doc("Frames that terminate before the end of identified fields " + "must be rejected with an H3_FRAME_ERROR connection error. " + "(RFC9114 7.1, RFC9114 10.8)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + <<7>>, cow_http3:encode_int(0) %% GOAWAY. + ]), + %% The connection should have been closed. + #{reason := h3_frame_error} = do_wait_connection_closed(Conn), + ok. + +max_push_id_frame_too_short(Config) -> + doc("Frames that terminate before the end of identified fields " + "must be rejected with an H3_FRAME_ERROR connection error. " + "(RFC9114 7.1, RFC9114 10.8)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + <<13>>, cow_http3:encode_int(0) %% MAX_PUSH_ID. + ]), + %% The connection should have been closed. + #{reason := h3_frame_error} = do_wait_connection_closed(Conn), + ok. + +data_frame_truncated(Config) -> + doc("Truncated frames must be rejected with an " + "H3_FRAME_ERROR connection error. (RFC9114 7.1, RFC9114 10.8)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/echo/read_body">>}, + {<<"content-length">>, <<"13">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders, + <<0>>, %% DATA frame. + cow_http3:encode_int(13), + <<"Hello ">> + ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_error} = do_wait_connection_closed(Conn), + ok. + +headers_frame_truncated(Config) -> + doc("Truncated frames must be rejected with an " + "H3_FRAME_ERROR connection error. (RFC9114 7.1, RFC9114 10.8)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)) + ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_error} = do_wait_connection_closed(Conn), + ok. + +%% I am not sure how to test truncated CANCEL_PUSH, SETTINGS, GOAWAY +%% or MAX_PUSH_ID frames, as those are sent on the control stream, +%% which we cannot terminate. 
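+
+%% The tests above build HTTP/3 frames by hand so that malformed frames can
+%% be produced: a variable-length integer frame type, a variable-length
+%% integer payload length, then the payload (RFC9114 7.1). The helper below
+%% is only an illustrative sketch of that layout for well-formed frames and
+%% is not used by the tests; the name do_encode_frame is ours.
+do_encode_frame(Type, Payload) ->
+	[
+		cow_http3:encode_int(Type),
+		cow_http3:encode_int(iolist_size(Payload)),
+		Payload
+	].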
+ +%% The DATA, HEADERS and SETTINGS frames can be of any length +%% therefore they cannot be too long per se, even if unwanted +%% data can be included at the end of the frame's payload. + +%% @todo Implement server push. cancel_push_frame_too_long(Config) -> + +goaway_frame_too_long(Config) -> + doc("Frames that contain additional bytes after the end of identified fields " + "must be rejected with an H3_FRAME_ERROR connection error. " + "(RFC9114 7.1, RFC9114 10.8)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + <<7>>, cow_http3:encode_int(3), %% GOAWAY. + <<0, 1, 2>> + ]), + %% The connection should have been closed. + #{reason := h3_frame_error} = do_wait_connection_closed(Conn), + ok. + +max_push_id_frame_too_long(Config) -> + doc("Frames that contain additional bytes after the end of identified fields " + "must be rejected with an H3_FRAME_ERROR connection error. " + "(RFC9114 7.1, RFC9114 10.8)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + <<13>>, cow_http3:encode_int(9), %% MAX_PUSH_ID. + <<0, 1, 2, 3, 4, 5, 6, 7, 8>> + ]), + %% The connection should have been closed. + #{reason := h3_frame_error} = do_wait_connection_closed(Conn), + ok. + +%% Streams may terminate abruptly in the middle of frames. + +data_frame_rejected_on_control_stream(Config) -> + doc("DATA frames received on the control stream must be rejected " + "with an H3_FRAME_UNEXPECTED connection error. (RFC9114 7.2.1)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + <<0>>, %% DATA frame. + cow_http3:encode_int(12), + <<"Hello world!">> + ]), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +headers_frame_rejected_on_control_stream(Config) -> + doc("HEADERS frames received on the control stream must be rejected " + "with an H3_FRAME_UNEXPECTED connection error. (RFC9114 7.2.2)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders + ]), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +%% @todo Implement server push. (RFC9114 7.2.3. 
CANCEL_PUSH) + +settings_twice(Config) -> + doc("Receipt of a second SETTINGS frame on the control stream " + "must be rejected with an H3_FRAME_UNEXPECTED connection error. (RFC9114 7.2.4)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + SettingsBin + ]), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +settings_on_bidi_stream(Config) -> + doc("Receipt of a SETTINGS frame on a bidirectional stream " + "must be rejected with an H3_FRAME_UNEXPECTED connection error. (RFC9114 7.2.4)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + SettingsBin, + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedRequest)), + EncodedRequest + ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +settings_identifier_twice(Config) -> + doc("Receipt of a duplicate SETTINGS identifier must be rejected " + "with an H3_SETTINGS_ERROR connection error. (RFC9114 7.2.4)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + SettingsPayload = [ + cow_http3:encode_int(6), cow_http3:encode_int(4096), + cow_http3:encode_int(6), cow_http3:encode_int(8192) + ], + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<4>>, %% SETTINGS frame. + cow_http3:encode_int(iolist_size(SettingsPayload)), + SettingsPayload + ]), + %% The connection should have been closed. + #{reason := h3_settings_error} = do_wait_connection_closed(Conn), + ok. + +settings_ignore_unknown_identifier(Config) -> + doc("Unknown SETTINGS identifiers must be ignored (RFC9114 7.2.4, RFC9114 9)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + SettingsPayload = [ + cow_http3:encode_int(999), cow_http3:encode_int(4096) + ], + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<4>>, %% SETTINGS frame. + cow_http3:encode_int(iolist_size(SettingsPayload)), + SettingsPayload + ]), + %% The connection should remain up. + receive + {quic, shutdown, Conn, {unknown_quic_status, Code}} -> + Reason = cow_http3:code_to_error(Code), + error(Reason) + after 1000 -> + ok + end. + +settings_ignore_reserved_identifier(Config) -> + doc("Reserved SETTINGS identifiers must be ignored (RFC9114 7.2.4.1)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + SettingsPayload = [ + cow_http3:encode_int(do_reserved_type()), cow_http3:encode_int(4096) + ], + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<4>>, %% SETTINGS frame. 
+ cow_http3:encode_int(iolist_size(SettingsPayload)), + SettingsPayload + ]), + %% The connection should remain up. + receive + {quic, shutdown, Conn, {unknown_quic_status, Code}} -> + Reason = cow_http3:code_to_error(Code), + error(Reason) + after 1000 -> + ok + end. + +%% @todo Check that we send a reserved SETTINGS identifier when sending a +%% non-empty SETTINGS frame. (7.2.4.1. Defined SETTINGS Parameters) + +%% @todo Check that setting SETTINGS_MAX_FIELD_SECTION_SIZE works. + +%% It is unclear whether the SETTINGS identifier 0x00 must be rejected or ignored. + +settings_reject_http2_0x02(Config) -> + do_settings_reject_http2(Config, 2, 1). + +settings_reject_http2_0x03(Config) -> + do_settings_reject_http2(Config, 3, 100). + +settings_reject_http2_0x04(Config) -> + do_settings_reject_http2(Config, 4, 128000). + +settings_reject_http2_0x05(Config) -> + do_settings_reject_http2(Config, 5, 1000000). + +do_settings_reject_http2(Config, Identifier, Value) -> + doc("Receipt of an unused HTTP/2 SETTINGS identifier must be rejected " + "with an H3_SETTINGS_ERROR connection error. (RFC9114 7.2.4, RFC9114 11.2.2)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + SettingsPayload = [ + cow_http3:encode_int(Identifier), cow_http3:encode_int(Value) + ], + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + <<4>>, %% SETTINGS frame. + cow_http3:encode_int(iolist_size(SettingsPayload)), + SettingsPayload + ]), + %% The connection should have been closed. + #{reason := h3_settings_error} = do_wait_connection_closed(Conn), + ok. + +%% 7.2.4.2. Initialization +%% An HTTP implementation MUST NOT send frames or requests that would be +%% invalid based on its current understanding of the peer's settings. +%% @todo In the case of SETTINGS_MAX_FIELD_SECTION_SIZE I don't think we have a choice. + +%% All settings begin at an initial value. Each endpoint SHOULD use these +%% initial values to send messages before the peer's SETTINGS frame has arrived, +%% as packets carrying the settings can be lost or delayed. When the SETTINGS +%% frame arrives, any settings are changed to their new values. + +%% Endpoints MUST NOT require any data to be received from the peer prior to +%% sending the SETTINGS frame; settings MUST be sent as soon as the transport is +%% ready to send data. + +%% @todo Implement 0-RTT. (7.2.4.2. Initialization) + +%% @todo Implement server push. (7.2.5. PUSH_PROMISE) + +goaway_on_bidi_stream(Config) -> + doc("Receipt of a GOAWAY frame on a bidirectional stream " + "must be rejected with an H3_FRAME_UNEXPECTED connection error. (RFC9114 7.2.6)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(StreamRef, [ + <<7>>, cow_http3:encode_int(1), cow_http3:encode_int(0) %% GOAWAY. + ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +%% @todo Implement server push. (7.2.6 GOAWAY - will have to reject too large push IDs) + +max_push_id_on_bidi_stream(Config) -> + doc("Receipt of a MAX_PUSH_ID frame on a bidirectional stream " + "must be rejected with an H3_FRAME_UNEXPECTED connection error. (RFC9114 7.2.7)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(StreamRef, [ + <<13>>, cow_http3:encode_int(1), cow_http3:encode_int(0) %% MAX_PUSH_ID. 
+ ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +%% @todo Implement server push. (7.2.7 MAX_PUSH_ID) + +max_push_id_reject_lower(Config) -> + doc("Receipt of a MAX_PUSH_ID value lower than previously received " + "must be rejected with an H3_ID_ERROR connection error. (RFC9114 7.2.7)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + <<13>>, cow_http3:encode_int(1), cow_http3:encode_int(20), %% MAX_PUSH_ID. + <<13>>, cow_http3:encode_int(1), cow_http3:encode_int(10) %% MAX_PUSH_ID. + ]), + %% The connection should have been closed. + #{reason := h3_id_error} = do_wait_connection_closed(Conn), + ok. + +reserved_on_control_stream(Config) -> + doc("Receipt of a reserved frame type on a control stream " + "must be ignored. (RFC9114 7.2.8)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + Len = rand:uniform(512), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + cow_http3:encode_int(do_reserved_type()), + cow_http3:encode_int(Len), + rand:bytes(Len) + ]), + %% The connection should remain up. + receive + {quic, shutdown, Conn, {unknown_quic_status, Code}} -> + Reason = cow_http3:code_to_error(Code), + error(Reason) + after 1000 -> + ok + end. + +reserved_reject_http2_0x02_control(Config) -> + do_reserved_reject_http2_control(Config, 2). + +reserved_reject_http2_0x06_control(Config) -> + do_reserved_reject_http2_control(Config, 6). + +reserved_reject_http2_0x08_control(Config) -> + do_reserved_reject_http2_control(Config, 8). + +reserved_reject_http2_0x09_control(Config) -> + do_reserved_reject_http2_control(Config, 9). + +do_reserved_reject_http2_control(Config, Type) -> + doc("Receipt of an unused HTTP/2 frame type must be rejected " + "with an H3_FRAME_UNEXPECTED connection error. (RFC9114 7.2.8, RFC9114 11.2.1)"), + #{conn := Conn} = do_connect(Config), + {ok, ControlRef} = quicer:start_stream(Conn, + #{open_flag => ?QUIC_STREAM_OPEN_FLAG_UNIDIRECTIONAL}), + {ok, SettingsBin, _HTTP3Machine0} = cow_http3_machine:init(client, #{}), + Len = rand:uniform(512), + {ok, _} = quicer:send(ControlRef, [ + <<0>>, %% CONTROL stream. + SettingsBin, + cow_http3:encode_int(Type), + cow_http3:encode_int(Len), + rand:bytes(Len) + ]), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +reserved_reject_http2_0x02_bidi(Config) -> + do_reserved_reject_http2_bidi(Config, 2). + +reserved_reject_http2_0x06_bidi(Config) -> + do_reserved_reject_http2_bidi(Config, 6). + +reserved_reject_http2_0x08_bidi(Config) -> + do_reserved_reject_http2_bidi(Config, 8). + +reserved_reject_http2_0x09_bidi(Config) -> + do_reserved_reject_http2_bidi(Config, 9). + +do_reserved_reject_http2_bidi(Config, Type) -> + doc("Receipt of an unused HTTP/2 frame type must be rejected " + "with an H3_FRAME_UNEXPECTED connection error. 
(RFC9114 7.2.8, RFC9114 11.2.1)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedHeaders, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":authority">>, <<"localhost">>}, + {<<":path">>, <<"/">>}, + {<<"content-length">>, <<"0">>} + ], 0, cow_qpack:init(encoder)), + Len = rand:uniform(512), + {ok, _} = quicer:send(StreamRef, [ + cow_http3:encode_int(Type), + cow_http3:encode_int(Len), + rand:bytes(Len), + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedHeaders)), + EncodedHeaders + ], ?QUIC_SEND_FLAG_FIN), + %% The connection should have been closed. + #{reason := h3_frame_unexpected} = do_wait_connection_closed(Conn), + ok. + +%% An endpoint MAY choose to treat a stream error as a connection error under +%% certain circumstances, closing the entire connection in response to a +%% condition on a single stream. + +%% Because new error codes can be defined without negotiation (see Section 9), +%% use of an error code in an unexpected context or receipt of an unknown error +%% code MUST be treated as equivalent to H3_NO_ERROR. + +%% 8.1. HTTP/3 Error Codes +%% H3_INTERNAL_ERROR (0x0102): An internal error has occurred in the HTTP stack. +%% H3_EXCESSIVE_LOAD (0x0107): The endpoint detected that its peer is +%% exhibiting a behavior that might be generating excessive load. +%% H3_MISSING_SETTINGS (0x010a): No SETTINGS frame was received +%% at the beginning of the control stream. +%% H3_REQUEST_REJECTED (0x010b): A server rejected a request without +%% performing any application processing. +%% H3_REQUEST_CANCELLED (0x010c): The request or its response +%% (including pushed response) is cancelled. +%% H3_REQUEST_INCOMPLETE (0x010d): The client's stream terminated +%% without containing a fully formed request. +%% H3_CONNECT_ERROR (0x010f): The TCP connection established in +%% response to a CONNECT request was reset or abnormally closed. +%% H3_VERSION_FALLBACK (0x0110): The requested operation cannot +%% be served over HTTP/3. The peer should retry over HTTP/1.1. + +%% 9. Extensions to HTTP/3 +%% If a setting is used for extension negotiation, the default value MUST be +%% defined in such a fashion that the extension is disabled if the setting is +%% omitted. + +%% 10. Security Considerations +%% 10.3. Intermediary-Encapsulation Attacks +%% Requests or responses containing invalid field names MUST be treated as malformed. +%% Any request or response that contains a character not permitted in a field +%% value MUST be treated as malformed. + +%% 10.5. Denial-of-Service Considerations +%% Implementations SHOULD track the use of these features and set limits on +%% their use. An endpoint MAY treat activity that is suspicious as a connection +%% error of type H3_EXCESSIVE_LOAD, but false positives will result in disrupting +%% valid connections and requests. + +reject_large_unknown_frame(Config) -> + doc("Large unknown frames may risk denial-of-service " + "and should be rejected. (RFC9114 10.5)"), + #{conn := Conn} = do_connect(Config), + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, _} = quicer:send(StreamRef, [ + cow_http3:encode_int(do_unknown_frame_type()), + cow_http3:encode_int(16385) + ]), + #{reason := h3_excessive_load} = do_wait_connection_closed(Conn), + ok. + +%% 10.5.1. 
Limits on Field Section Size +%% An endpoint can use the SETTINGS_MAX_FIELD_SECTION_SIZE (Section 4.2.2) +%% setting to advise peers of limits that might apply on the size of field +%% sections. +%% +%% A server that receives a larger field section than it is willing to handle +%% can send an HTTP 431 (Request Header Fields Too Large) status code +%% ([RFC6585]). + +%% 10.6. Use of Compression +%% Implementations communicating on a secure channel MUST NOT compress content +%% that includes both confidential and attacker-controlled data unless separate +%% compression contexts are used for each source of data. Compression MUST NOT be +%% used if the source of data cannot be reliably determined. + +%% 10.9. Early Data +%% The anti-replay mitigations in [HTTP-REPLAY] MUST be applied when using HTTP/3 with 0-RTT. + +%% 10.10. Migration +%% Certain HTTP implementations use the client address for logging or +%% access-control purposes. Since a QUIC client's address might change during a +%% connection (and future versions might support simultaneous use of multiple +%% addresses), such implementations will need to either actively retrieve the +%% client's current address or addresses when they are relevant or explicitly +%% accept that the original address might change. @todo Document this behavior. + +%% Appendix A. Considerations for Transitioning from HTTP/2 +%% A.1. Streams +%% QUIC considers a stream closed when all data has been received and sent data +%% has been acknowledged by the peer. HTTP/2 considers a stream closed when the +%% frame containing the END_STREAM bit has been committed to the transport. As a +%% result, the stream for an equivalent exchange could remain "active" for a +%% longer period of time. HTTP/3 servers might choose to permit a larger number +%% of concurrent client-initiated bidirectional streams to achieve equivalent +%% concurrency to HTTP/2, depending on the expected usage patterns. @todo Document this. + +%% Helper functions. + +%% @todo Maybe have a function in cow_http3. +do_reserved_type() -> + 16#1f * (rand:uniform(148764065110560900) - 1) + 16#21. + +do_connect(Config) -> + do_connect(Config, #{}). + +do_connect(Config, Opts) -> + {ok, Conn} = quicer:connect("localhost", config(port, Config), + Opts#{alpn => ["h3"], verify => none}, 5000), + %% To make sure the connection is fully established we wait + %% to receive the SETTINGS frame on the control stream. + {ok, ControlRef, Settings} = do_wait_settings(Conn), + #{ + conn => Conn, + control => ControlRef, %% This is the peer control stream. + settings => Settings + }. + +do_wait_settings(Conn) -> + receive + {quic, new_stream, StreamRef, #{flags := Flags}} -> + ok = quicer:setopt(StreamRef, active, true), + true = quicer:is_unidirectional(Flags), + receive {quic, << + 0, %% Control stream. + SettingsFrame/bits + >>, StreamRef, _} -> + {ok, {settings, Settings}, <<>>} = cow_http3:parse(SettingsFrame), + {ok, StreamRef, Settings} + after 5000 -> + {error, timeout} + end + after 5000 -> + {error, timeout} + end. + +do_receive_data(StreamRef) -> + receive + {quic, Data, StreamRef, _Flags} when is_binary(Data) -> + {ok, Data} + after 5000 -> + {error, timeout} + end. + +do_guess_int_encoding(Data) -> + SizeWithLen = byte_size(Data) - 1, + if + SizeWithLen < 64 + 1 -> + {0, 6}; + SizeWithLen < 16384 + 2 -> + {1, 14}; + SizeWithLen < 1073741824 + 4 -> + {2, 30}; + SizeWithLen < 4611686018427387904 + 8 -> + {3, 62} + end. 
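+
+%% do_guess_int_encoding/1 above relies on QUIC variable-length integers
+%% (RFC9000 16): the two most significant bits of the first byte select a
+%% 6-, 14-, 30- or 62-bit value. A minimal decoder sketch, not used by the
+%% tests; the name do_decode_int is ours.
+do_decode_int(<<0:2, Value:6, Rest/bits>>) -> {Value, Rest};
+do_decode_int(<<1:2, Value:14, Rest/bits>>) -> {Value, Rest};
+do_decode_int(<<2:2, Value:30, Rest/bits>>) -> {Value, Rest};
+do_decode_int(<<3:2, Value:62, Rest/bits>>) -> {Value, Rest}.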
+ +do_wait_peer_send_shutdown(StreamRef) -> + receive + {quic, peer_send_shutdown, StreamRef, undefined} -> + ok + after 5000 -> + {error, timeout} + end. + +do_wait_stream_aborted(StreamRef) -> + receive + {quic, peer_send_aborted, StreamRef, Code} -> + Reason = cow_http3:code_to_error(Code), + #{reason => Reason}; + {quic, peer_receive_aborted, StreamRef, Code} -> + Reason = cow_http3:code_to_error(Code), + #{reason => Reason} + after 5000 -> + {error, timeout} + end. + +do_wait_stream_closed(StreamRef) -> + receive + {quic, stream_closed, StreamRef, #{error := Error, is_conn_shutdown := false}} -> + 0 = Error, + ok + after 5000 -> + {error, timeout} + end. + +do_receive_response(StreamRef) -> + {ok, Data} = do_receive_data(StreamRef), + {HLenEnc, HLenBits} = do_guess_int_encoding(Data), + << + 1, %% HEADERS frame. + HLenEnc:2, HLen:HLenBits, + EncodedResponse:HLen/bytes, + Rest/bits + >> = Data, + {ok, DecodedResponse, _DecData, _DecSt} + = cow_qpack:decode_field_section(EncodedResponse, 0, cow_qpack:init(decoder)), + Headers = maps:from_list(DecodedResponse), + #{<<"content-length">> := BodyLen} = Headers, + {DLenEnc, DLenBits} = do_guess_int_encoding(Rest), + Body = case Rest of + <<>> -> + <<>>; + << + 0, %% DATA frame. + DLenEnc:2, DLen:DLenBits, + Body0:DLen/bytes + >> -> + BodyLen = integer_to_binary(byte_size(Body0)), + Body0 + end, + ok = do_wait_peer_send_shutdown(StreamRef), + #{ + headers => Headers, + body => Body + }. + +do_wait_connection_closed(Conn) -> + receive + {quic, shutdown, Conn, {unknown_quic_status, Code}} -> + Reason = cow_http3:code_to_error(Code), + #{reason => Reason} + after 5000 -> + {error, timeout} + end. + +-endif. diff --git a/test/rfc9114_SUITE_data/client.key b/test/rfc9114_SUITE_data/client.key new file mode 100644 index 0000000..9c5e1ce --- /dev/null +++ b/test/rfc9114_SUITE_data/client.key @@ -0,0 +1,5 @@ +-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgVJakPYfQA1Hr6Gnq +GYmpMfXpxUi2QwDBrZfw8dBcVqKhRANCAAQDHeeAvjwD7p+Mg1F+G9FBNy+7Wcms +HEw4sGMzhUL4wjwsqKHpoiuQg3qUXXK0gamx0l77vFjrUc6X1al4+ZM5 +-----END PRIVATE KEY----- diff --git a/test/rfc9114_SUITE_data/client.pem b/test/rfc9114_SUITE_data/client.pem new file mode 100644 index 0000000..cd9dc8c --- /dev/null +++ b/test/rfc9114_SUITE_data/client.pem @@ -0,0 +1,12 @@ +-----BEGIN CERTIFICATE----- +MIIBtTCCAVugAwIBAgIUeAPi9oyMIE/KRpsRdukfx2eMuuswCgYIKoZIzj0EAwIw +IDELMAkGA1UEBhMCU0UxETAPBgNVBAoMCE5PQk9EWUFCMB4XDTIzMDcwNTEwMjIy +MloXDTI0MTExNjEwMjIyMlowMTELMAkGA1UEBhMCU0UxETAPBgNVBAoMCE5PQk9E +WUFCMQ8wDQYDVQQDDAZjbGllbnQwWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAAQD +HeeAvjwD7p+Mg1F+G9FBNy+7WcmsHEw4sGMzhUL4wjwsqKHpoiuQg3qUXXK0gamx +0l77vFjrUc6X1al4+ZM5o2IwYDALBgNVHQ8EBAMCA4gwEQYDVR0RBAowCIIGY2xp +ZW50MB0GA1UdDgQWBBTnhPpO+rSIFAxvkwVjlkKOO2jOeDAfBgNVHSMEGDAWgBSD +Hw8A4XXG3jB1Atrqux7AUsf+KjAKBggqhkjOPQQDAgNIADBFAiEA2qf29EBp2hcL +sEO7MM0ZLm4gnaMdcxtyneF3+c7Lg3cCIBFTVP8xHlhCJyb8ESV7S052VU0bKQFN +ioyoYtcycxuZ +-----END CERTIFICATE----- diff --git a/test/rfc9114_SUITE_data/server.key b/test/rfc9114_SUITE_data/server.key new file mode 100644 index 0000000..45ea890 --- /dev/null +++ b/test/rfc9114_SUITE_data/server.key @@ -0,0 +1,5 @@ +-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgvykUYMOS2gW8XTTh +HgmeJM36NT8GGTNXzzt4sIs0o9ahRANCAATnQOMkKbLFQCZY/cxf8otEJG2tVuG6 +QvLqUdERV2+gzE+4ROGDqbb2Jk1szyz4CfBMB4ZfLA/PdSiO+KrOeOcj +-----END PRIVATE KEY----- diff --git a/test/rfc9114_SUITE_data/server.pem b/test/rfc9114_SUITE_data/server.pem new file mode 100644 index 
0000000..43cce8e --- /dev/null +++ b/test/rfc9114_SUITE_data/server.pem @@ -0,0 +1,12 @@ +-----BEGIN CERTIFICATE----- +MIIBtTCCAVugAwIBAgIUeAPi9oyMIE/KRpsRdukfx2eMuuowCgYIKoZIzj0EAwIw +IDELMAkGA1UEBhMCU0UxETAPBgNVBAoMCE5PQk9EWUFCMB4XDTIzMDcwNTEwMjIy +MloXDTI0MTExNjEwMjIyMlowMTELMAkGA1UEBhMCU0UxETAPBgNVBAoMCE5PQk9E +WUFCMQ8wDQYDVQQDDAZzZXJ2ZXIwWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAATn +QOMkKbLFQCZY/cxf8otEJG2tVuG6QvLqUdERV2+gzE+4ROGDqbb2Jk1szyz4CfBM +B4ZfLA/PdSiO+KrOeOcjo2IwYDALBgNVHQ8EBAMCA4gwEQYDVR0RBAowCIIGc2Vy +dmVyMB0GA1UdDgQWBBS+Np5J8BtmWU534pm9hqhrG/EQ7zAfBgNVHSMEGDAWgBSD +Hw8A4XXG3jB1Atrqux7AUsf+KjAKBggqhkjOPQQDAgNIADBFAiEApRfjIEJfO1VH +ETgNG3/MzDayYScPocVn4v8U15ygEw8CIFUY3xMZzJ5AmiRe9PhIUgueOKQNMtds +wdF9+097+Ey0 +-----END CERTIFICATE----- diff --git a/test/rfc9204_SUITE.erl b/test/rfc9204_SUITE.erl new file mode 100644 index 0000000..942c41b --- /dev/null +++ b/test/rfc9204_SUITE.erl @@ -0,0 +1,357 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(rfc9204_SUITE). +-compile(export_all). +-compile(nowarn_export_all). + +-import(ct_helper, [config/2]). +-import(ct_helper, [doc/1]). + +-ifdef(COWBOY_QUICER). + +-include_lib("quicer/include/quicer.hrl"). + +all() -> + [{group, h3}]. + +groups() -> + %% @todo Enable parallel tests but for this issues in the + %% QUIC accept loop need to be figured out (can't connect + %% concurrently somehow, no backlog?). + [{h3, [], ct_helper:all(?MODULE)}]. + +init_per_group(Name = h3, Config) -> + cowboy_test:init_http3(Name, #{ + env => #{dispatch => cowboy_router:compile(init_routes(Config))} + }, Config). + +end_per_group(Name, _) -> + cowboy_test:stop_group(Name). + +init_routes(_) -> [ + {"localhost", [ + {"/", hello_h, []} + ]} +]. + +%% Encoder. + +%% 2.1 +%% QPACK preserves the ordering of field lines within +%% each field section. An encoder MUST emit field +%% representations in the order they appear in the +%% input field section. + +%% 2.1.1 +%% If the dynamic table does not contain enough room +%% for a new entry without evicting other entries, +%% and the entries that would be evicted are not evictable, +%% the encoder MUST NOT insert that entry into the dynamic +%% table (including duplicates of existing entries). +%% In order to avoid this, an encoder that uses the +%% dynamic table has to keep track of each dynamic +%% table entry referenced by each field section until +%% those representations are acknowledged by the decoder; +%% see Section 4.4.1. + +%% 2.1.2 +%% The decoder specifies an upper bound on the number +%% of streams that can be blocked using the +%% SETTINGS_QPACK_BLOCKED_STREAMS setting; see Section 5. +%% An encoder MUST limit the number of streams that could +%% become blocked to the value of SETTINGS_QPACK_BLOCKED_STREAMS +%% at all times. 
If a decoder encounters more blocked streams +%% than it promised to support, it MUST treat this as a +%% connection error of type QPACK_DECOMPRESSION_FAILED. + +%% 2.1.3 +%% To avoid these deadlocks, an encoder SHOULD NOT +%% write an instruction unless sufficient stream and +%% connection flow-control credit is available for +%% the entire instruction. + +%% Decoder. + +%% 2.2 +%% The decoder MUST emit field lines in the order their +%% representations appear in the encoded field section. + +%% 2.2.1 +%% While blocked, encoded field section data SHOULD +%% remain in the blocked stream's flow-control window. + +%% If it encounters a Required Insert Count smaller than +%% expected, it MUST treat this as a connection error of +%% type QPACK_DECOMPRESSION_FAILED; see Section 2.2.3. + +%% If it encounters a Required Insert Count larger than +%% expected, it MAY treat this as a connection error of +%% type QPACK_DECOMPRESSION_FAILED. + +%% After the decoder finishes decoding a field section +%% encoded using representations containing dynamic table +%% references, it MUST emit a Section Acknowledgment +%% instruction (Section 4.4.1). + +%% 2.2.2.2 +%% A decoder with a maximum dynamic table capacity +%% (Section 3.2.3) equal to zero MAY omit sending Stream +%% Cancellations, because the encoder cannot have any +%% dynamic table references. + +%% 2.2.3 +%% If the decoder encounters a reference in a field line +%% representation to a dynamic table entry that has already +%% been evicted or that has an absolute index greater than +%% or equal to the declared Required Insert Count (Section 4.5.1), +%% it MUST treat this as a connection error of type +%% QPACK_DECOMPRESSION_FAILED. + +%% If the decoder encounters a reference in an encoder +%% instruction to a dynamic table entry that has already +%% been evicted, it MUST treat this as a connection error +%% of type QPACK_ENCODER_STREAM_ERROR. + +%% Static table. + +%% 3.1 +%% When the decoder encounters an invalid static table index +%% in a field line representation, it MUST treat this as a +%% connection error of type QPACK_DECOMPRESSION_FAILED. +%% +%% If this index is received on the encoder stream, this +%% MUST be treated as a connection error of type +%% QPACK_ENCODER_STREAM_ERROR. + +%% Dynamic table. + +%% 3.2 +%% The dynamic table can contain duplicate entries +%% (i.e., entries with the same name and same value). +%% Therefore, duplicate entries MUST NOT be treated +%% as an error by the decoder. + +%% 3.2.2 +%% The encoder MUST NOT cause a dynamic table entry to be +%% evicted unless that entry is evictable; see Section 2.1.1. + +%% It is an error if the encoder attempts to add an entry +%% that is larger than the dynamic table capacity; the +%% decoder MUST treat this as a connection error of type +%% QPACK_ENCODER_STREAM_ERROR. + +%% 3.2.3 +%% The encoder MUST NOT set a dynamic table capacity that +%% exceeds this maximum, but it can choose to use a lower +%% dynamic table capacity; see Section 4.3.1. + +%% When the client's 0-RTT value of the SETTING is zero, +%% the server MAY set it to a non-zero value in its SETTINGS +%% frame. If the remembered value is non-zero, the server +%% MUST send the same non-zero value in its SETTINGS frame. +%% If it specifies any other value, or omits +%% SETTINGS_QPACK_MAX_TABLE_CAPACITY from SETTINGS, +%% the encoder must treat this as a connection error of +%% type QPACK_DECODER_STREAM_ERROR. 
+ +%% When the maximum table capacity is zero, the encoder +%% MUST NOT insert entries into the dynamic table and +%% MUST NOT send any encoder instructions on the encoder stream. + +%% Wire format. + +%% 4.1.1 +%% QPACK implementations MUST be able to decode integers +%% up to and including 62 bits long. + +%% Encoder and decoder streams. + +decoder_reject_multiple(Config) -> + doc("Endpoints must not create multiple decoder streams. (RFC9204 4.2)"), + rfc9114_SUITE:do_critical_reject_multiple(Config, <<3>>). + +encoder_reject_multiple(Config) -> + doc("Endpoints must not create multiple encoder streams. (RFC9204 4.2)"), + rfc9114_SUITE:do_critical_reject_multiple(Config, <<2>>). + +%% 4.2 +%% The sender MUST NOT close either of these streams, +%% and the receiver MUST NOT request that the sender close +%% either of these streams. Closure of either unidirectional +%% stream type MUST be treated as a connection error of type +%% H3_CLOSED_CRITICAL_STREAM. + +decoder_local_closed_abort(Config) -> + doc("Endpoints must not close the decoder stream. (RFC9204 4.2)"), + rfc9114_SUITE:do_critical_local_closed_abort(Config, <<3>>). + +decoder_local_closed_graceful(Config) -> + doc("Endpoints must not close the decoder stream. (RFC9204 4.2)"), + rfc9114_SUITE:do_critical_local_closed_graceful(Config, <<3>>). + +decoder_remote_closed_abort(Config) -> + doc("Endpoints must not close the decoder stream. (RFC9204 4.2)"), + #{conn := Conn} = rfc9114_SUITE:do_connect(Config, #{peer_unidi_stream_count => 3}), + {ok, #{decoder := StreamRef}} = do_wait_unidi_streams(Conn, #{}), + %% Close the control stream. + quicer:async_shutdown_stream(StreamRef, ?QUIC_STREAM_SHUTDOWN_FLAG_ABORT, 0), + %% The connection should have been closed. + #{reason := h3_closed_critical_stream} = rfc9114_SUITE:do_wait_connection_closed(Conn), + ok. + +encoder_local_closed_abort(Config) -> + doc("Endpoints must not close the encoder stream. (RFC9204 4.2)"), + rfc9114_SUITE:do_critical_local_closed_abort(Config, <<2>>). + +encoder_local_closed_graceful(Config) -> + doc("Endpoints must not close the encoder stream. (RFC9204 4.2)"), + rfc9114_SUITE:do_critical_local_closed_graceful(Config, <<2>>). + +encoder_remote_closed_abort(Config) -> + doc("Endpoints must not close the encoder stream. (RFC9204 4.2)"), + #{conn := Conn} = rfc9114_SUITE:do_connect(Config, #{peer_unidi_stream_count => 3}), + {ok, #{encoder := StreamRef}} = do_wait_unidi_streams(Conn, #{}), + %% Close the control stream. + quicer:async_shutdown_stream(StreamRef, ?QUIC_STREAM_SHUTDOWN_FLAG_ABORT, 0), + %% The connection should have been closed. + #{reason := h3_closed_critical_stream} = rfc9114_SUITE:do_wait_connection_closed(Conn), + ok. + +do_wait_unidi_streams(_, Acc=#{decoder := _, encoder := _}) -> + {ok, Acc}; +do_wait_unidi_streams(Conn, Acc) -> + receive + {quic, new_stream, StreamRef, #{flags := Flags}} -> + ok = quicer:setopt(StreamRef, active, true), + true = quicer:is_unidirectional(Flags), + receive {quic, <<TypeValue>>, StreamRef, _} -> + Type = case TypeValue of + 2 -> encoder; + 3 -> decoder + end, + do_wait_unidi_streams(Conn, Acc#{Type => StreamRef}) + after 5000 -> + {error, timeout} + end + after 5000 -> + {error, timeout} + end. + +%% An endpoint MAY avoid creating an encoder stream if it will +%% not be used (for example, if its encoder does not wish to +%% use the dynamic table or if the maximum size of the dynamic +%% table permitted by the peer is zero). 
+ +%% An endpoint MAY avoid creating a decoder stream if its +%% decoder sets the maximum capacity of the dynamic table to zero. + +%% An endpoint MUST allow its peer to create an encoder stream +%% and a decoder stream even if the connection's settings +%% prevent their use. + +%% Encoder instructions. + +%% 4.3.1 +%% The new capacity MUST be lower than or equal to the limit +%% described in Section 3.2.3. In HTTP/3, this limit is the +%% value of the SETTINGS_QPACK_MAX_TABLE_CAPACITY parameter +%% (Section 5) received from the decoder. The decoder MUST +%% treat a new dynamic table capacity value that exceeds this +%% limit as a connection error of type QPACK_ENCODER_STREAM_ERROR. + +%% Reducing the dynamic table capacity can cause entries to be +%% evicted; see Section 3.2.2. This MUST NOT cause the eviction +%% of entries that are not evictable; see Section 2.1.1. + +%% Decoder instructions. + +%% 4.4.1 +%% If an encoder receives a Section Acknowledgment instruction +%% referring to a stream on which every encoded field section +%% with a non-zero Required Insert Count has already been +%% acknowledged, this MUST be treated as a connection error +%% of type QPACK_DECODER_STREAM_ERROR. + +%% 4.4.3 +%% An encoder that receives an Increment field equal to zero, +%% or one that increases the Known Received Count beyond what +%% the encoder has sent, MUST treat this as a connection error +%% of type QPACK_DECODER_STREAM_ERROR. + +%% Field line representation. + +%% 4.5.1.1 +%% If the decoder encounters a value of EncodedInsertCount that +%% could not have been produced by a conformant encoder, it MUST +%% treat this as a connection error of type QPACK_DECOMPRESSION_FAILED. + +%% 4.5.1.2 +%% The value of Base MUST NOT be negative. Though the protocol +%% might operate correctly with a negative Base using post-Base +%% indexing, it is unnecessary and inefficient. An endpoint MUST +%% treat a field block with a Sign bit of 1 as invalid if the +%% value of Required Insert Count is less than or equal to the +%% value of Delta Base. + +%% 4.5.4 +%% When the 'N' bit is set, the encoded field line MUST always +%% be encoded with a literal representation. In particular, +%% when a peer sends a field line that it received represented +%% as a literal field line with the 'N' bit set, it MUST use a +%% literal representation to forward this field line. This bit +%% is intended for protecting field values that are not to be +%% put at risk by compressing them; see Section 7.1 for more details. + +%% Configuration. + +%% 5 +%% SETTINGS_QPACK_MAX_TABLE_CAPACITY +%% SETTINGS_QPACK_BLOCKED_STREAMS + +%% Security considerations. + +%% 7.1.2 +%% (security if used as a proxy merging many connections into one) +%% An ideal solution segregates access to the dynamic table +%% based on the entity that is constructing the message. +%% Field values that are added to the table are attributed +%% to an entity, and only the entity that created a particular +%% value can extract that value. + +%% 7.1.3 +%% An intermediary MUST NOT re-encode a value that uses a +%% literal representation with the 'N' bit set with another +%% representation that would index it. If QPACK is used for +%% re-encoding, a literal representation with the 'N' bit set +%% MUST be used. If HPACK is used for re-encoding, the +%% never-indexed literal representation (see Section 6.2.3 +%% of [RFC7541]) MUST be used. 
+ +%% 7.4 +%% An implementation has to set a limit for the values it +%% accepts for integers, as well as for the encoded length; +%% see Section 4.1.1. In the same way, it has to set a limit +%% to the length it accepts for string literals; see Section 4.1.2. +%% These limits SHOULD be large enough to process the largest +%% individual field the HTTP implementation can be configured +%% to accept. + +%% If an implementation encounters a value larger than it is +%% able to decode, this MUST be treated as a stream error of +%% type QPACK_DECOMPRESSION_FAILED if on a request stream or +%% a connection error of the appropriate type if on the +%% encoder or decoder stream. + +-endif. diff --git a/test/rfc9220_SUITE.erl b/test/rfc9220_SUITE.erl new file mode 100644 index 0000000..38a59b2 --- /dev/null +++ b/test/rfc9220_SUITE.erl @@ -0,0 +1,485 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(rfc9220_SUITE). +-compile(export_all). +-compile(nowarn_export_all). + +-import(ct_helper, [config/2]). +-import(ct_helper, [doc/1]). + +all() -> + [{group, enabled}]. + +groups() -> + Tests = ct_helper:all(?MODULE), + [{enabled, [], Tests}]. %% @todo Enable parallel when all is better. + +init_per_group(Name = enabled, Config) -> + cowboy_test:init_http3(Name, #{ + enable_connect_protocol => true, + env => #{dispatch => cowboy_router:compile(init_routes(Config))} + }, Config). + +end_per_group(Name, _) -> + cowboy_test:stop_group(Name). + +init_routes(_) -> [ + {"localhost", [ + {"/ws", ws_echo, []} + ]} +]. + +% The SETTINGS_ENABLE_CONNECT_PROTOCOL SETTINGS Parameter. + +% The new parameter name is SETTINGS_ENABLE_CONNECT_PROTOCOL. The +% value of the parameter MUST be 0 or 1. + +% Upon receipt of SETTINGS_ENABLE_CONNECT_PROTOCOL with a value of 1 a +% client MAY use the Extended CONNECT definition of this document when +% creating new streams. Receipt of this parameter by a server does not +% have any impact. +%% @todo ignore_client_enable_setting(Config) -> + +reject_handshake_when_disabled(Config0) -> + doc("Extended CONNECT requests MUST be rejected with a " + "H3_MESSAGE_ERROR stream error when enable_connect_protocol=false. " + "(RFC9220, RFC8441 4)"), + Config = cowboy_test:init_http3(disabled, #{ + enable_connect_protocol => false, + env => #{dispatch => cowboy_router:compile(init_routes(Config0))} + }, Config0), + %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 0. + #{ + conn := Conn, + settings := Settings + } = rfc9114_SUITE:do_connect(Config), + case Settings of + #{enable_connect_protocol := false} -> ok; + _ when map_size(Settings) =:= 0 -> ok + end, + %% Send a CONNECT :protocol request to upgrade the stream to Websocket. 
+ {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"CONNECT">>}, + {<<":protocol">>, <<"websocket">>}, + {<<":scheme">>, <<"https">>}, + {<<":path">>, <<"/ws">>}, + {<<":authority">>, <<"localhost">>}, %% @todo Correct port number. + {<<"sec-websocket-version">>, <<"13">>}, + {<<"origin">>, <<"http://localhost">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedRequest)), + EncodedRequest + ]), + %% The stream should have been aborted. + #{reason := h3_message_error} = rfc9114_SUITE:do_wait_stream_aborted(StreamRef), + ok. + +reject_handshake_disabled_by_default(Config0) -> + doc("Extended CONNECT requests MUST be rejected with a " + "H3_MESSAGE_ERROR stream error when enable_connect_protocol=false. " + "(RFC9220, RFC8441 4)"), + Config = cowboy_test:init_http3(disabled, #{ + env => #{dispatch => cowboy_router:compile(init_routes(Config0))} + }, Config0), + %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 0. + #{ + conn := Conn, + settings := Settings + } = rfc9114_SUITE:do_connect(Config), + case Settings of + #{enable_connect_protocol := false} -> ok; + _ when map_size(Settings) =:= 0 -> ok + end, + %% Send a CONNECT :protocol request to upgrade the stream to Websocket. + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"CONNECT">>}, + {<<":protocol">>, <<"websocket">>}, + {<<":scheme">>, <<"https">>}, + {<<":path">>, <<"/ws">>}, + {<<":authority">>, <<"localhost">>}, %% @todo Correct port number. + {<<"sec-websocket-version">>, <<"13">>}, + {<<"origin">>, <<"http://localhost">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedRequest)), + EncodedRequest + ]), + %% The stream should have been aborted. + #{reason := h3_message_error} = rfc9114_SUITE:do_wait_stream_aborted(StreamRef), + ok. + +% The Extended CONNECT Method. + +accept_uppercase_pseudo_header_protocol(Config) -> + doc("The :protocol pseudo header is case insensitive. (RFC9220, RFC8441 4, RFC9110 7.8)"), + %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1. + #{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config), + #{enable_connect_protocol := true} = Settings, + %% Send a CONNECT :protocol request to upgrade the stream to Websocket. + {ok, StreamRef} = quicer:start_stream(Conn, #{}), + {ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([ + {<<":method">>, <<"CONNECT">>}, + {<<":protocol">>, <<"WEBSOCKET">>}, + {<<":scheme">>, <<"https">>}, + {<<":path">>, <<"/ws">>}, + {<<":authority">>, <<"localhost">>}, %% @todo Correct port number. + {<<"sec-websocket-version">>, <<"13">>}, + {<<"origin">>, <<"http://localhost">>} + ], 0, cow_qpack:init(encoder)), + {ok, _} = quicer:send(StreamRef, [ + <<1>>, %% HEADERS frame. + cow_http3:encode_int(iolist_size(EncodedRequest)), + EncodedRequest + ]), + %% Receive a 200 response. + {ok, Data} = rfc9114_SUITE:do_receive_data(StreamRef), + {HLenEnc, HLenBits} = rfc9114_SUITE:do_guess_int_encoding(Data), + << + 1, %% HEADERS frame. 
+		HLenEnc:2, HLen:HLenBits,
+		EncodedResponse:HLen/bytes
+	>> = Data,
+	{ok, DecodedResponse, _DecData, _DecSt}
+		= cow_qpack:decode_field_section(EncodedResponse, 0, cow_qpack:init(decoder)),
+	#{<<":status">> := <<"200">>} = maps:from_list(DecodedResponse),
+	ok.
+
+reject_many_pseudo_header_protocol(Config) ->
+	doc("An extended CONNECT request containing more than one "
+		"protocol component must be rejected with a H3_MESSAGE_ERROR "
+		"stream error. (RFC9220, RFC9114 4.3.1, RFC9114 4.1.2)"),
+	%% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+	#{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+	#{enable_connect_protocol := true} = Settings,
+	%% Send an extended CONNECT request with more than one :protocol pseudo-header.
+	{ok, StreamRef} = quicer:start_stream(Conn, #{}),
+	{ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+		{<<":method">>, <<"CONNECT">>},
+		{<<":protocol">>, <<"websocket">>},
+		{<<":protocol">>, <<"mqtt">>},
+		{<<":scheme">>, <<"https">>},
+		{<<":path">>, <<"/ws">>},
+		{<<":authority">>, <<"localhost">>}, %% @todo Correct port number.
+		{<<"sec-websocket-version">>, <<"13">>},
+		{<<"origin">>, <<"http://localhost">>}
+	], 0, cow_qpack:init(encoder)),
+	{ok, _} = quicer:send(StreamRef, [
+		<<1>>, %% HEADERS frame.
+		cow_http3:encode_int(iolist_size(EncodedRequest)),
+		EncodedRequest
+	]),
+	%% The stream should have been aborted.
+	#{reason := h3_message_error} = rfc9114_SUITE:do_wait_stream_aborted(StreamRef),
+	ok.
+
+reject_unknown_pseudo_header_protocol(Config) ->
+	doc("An extended CONNECT request with an unknown protocol "
+		"component must be rejected with a 501 Not Implemented "
+		"response. (RFC9220, RFC8441 4)"),
+	%% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+	#{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+	#{enable_connect_protocol := true} = Settings,
+	%% Send an extended CONNECT request with an unknown protocol.
+	{ok, StreamRef} = quicer:start_stream(Conn, #{}),
+	{ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+		{<<":method">>, <<"CONNECT">>},
+		{<<":protocol">>, <<"mqtt">>},
+		{<<":scheme">>, <<"https">>},
+		{<<":path">>, <<"/ws">>},
+		{<<":authority">>, <<"localhost">>}, %% @todo Correct port number.
+		{<<"sec-websocket-version">>, <<"13">>},
+		{<<"origin">>, <<"http://localhost">>}
+	], 0, cow_qpack:init(encoder)),
+	{ok, _} = quicer:send(StreamRef, [
+		<<1>>, %% HEADERS frame.
+		cow_http3:encode_int(iolist_size(EncodedRequest)),
+		EncodedRequest
+	]),
+	%% The stream should have been rejected with a 501 Not Implemented.
+	#{headers := #{<<":status">> := <<"501">>}} = rfc9114_SUITE:do_receive_response(StreamRef),
+	ok.
+
+reject_invalid_pseudo_header_protocol(Config) ->
+	doc("An extended CONNECT request with an invalid protocol "
+		"component must be rejected with a 501 Not Implemented "
+		"response. (RFC9220, RFC8441 4)"),
+	%% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+	#{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+	#{enable_connect_protocol := true} = Settings,
+	%% Send an extended CONNECT request with an invalid protocol.
+	{ok, StreamRef} = quicer:start_stream(Conn, #{}),
+	{ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+		{<<":method">>, <<"CONNECT">>},
+		{<<":protocol">>, <<"websocket mqtt">>},
+		{<<":scheme">>, <<"https">>},
+		{<<":path">>, <<"/ws">>},
+		{<<":authority">>, <<"localhost">>}, %% @todo Correct port number.
+		{<<"sec-websocket-version">>, <<"13">>},
+		{<<"origin">>, <<"http://localhost">>}
+	], 0, cow_qpack:init(encoder)),
+	{ok, _} = quicer:send(StreamRef, [
+		<<1>>, %% HEADERS frame.
+		cow_http3:encode_int(iolist_size(EncodedRequest)),
+		EncodedRequest
+	]),
+	%% The stream should have been rejected with a 501 Not Implemented.
+	#{headers := #{<<":status">> := <<"501">>}} = rfc9114_SUITE:do_receive_response(StreamRef),
+	ok.
+
+reject_missing_pseudo_header_scheme(Config) ->
+	doc("An extended CONNECT request without a scheme component "
+		"must be rejected with a H3_MESSAGE_ERROR stream error. "
+		"(RFC9220, RFC9114 4.3.1, RFC9114 4.1.2)"),
+	%% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+	#{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+	#{enable_connect_protocol := true} = Settings,
+	%% Send an extended CONNECT request without a :scheme pseudo-header.
+	{ok, StreamRef} = quicer:start_stream(Conn, #{}),
+	{ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+		{<<":method">>, <<"CONNECT">>},
+		{<<":protocol">>, <<"websocket">>},
+		{<<":path">>, <<"/ws">>},
+		{<<":authority">>, <<"localhost">>}, %% @todo Correct port number.
+		{<<"sec-websocket-version">>, <<"13">>},
+		{<<"origin">>, <<"http://localhost">>}
+	], 0, cow_qpack:init(encoder)),
+	{ok, _} = quicer:send(StreamRef, [
+		<<1>>, %% HEADERS frame.
+		cow_http3:encode_int(iolist_size(EncodedRequest)),
+		EncodedRequest
+	]),
+	%% The stream should have been aborted.
+	#{reason := h3_message_error} = rfc9114_SUITE:do_wait_stream_aborted(StreamRef),
+	ok.
+
+reject_missing_pseudo_header_path(Config) ->
+	doc("An extended CONNECT request without a path component "
+		"must be rejected with a H3_MESSAGE_ERROR stream error. "
+		"(RFC9220, RFC9114 4.3.1, RFC9114 4.1.2)"),
+	%% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+	#{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+	#{enable_connect_protocol := true} = Settings,
+	%% Send an extended CONNECT request without a :path pseudo-header.
+	{ok, StreamRef} = quicer:start_stream(Conn, #{}),
+	{ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+		{<<":method">>, <<"CONNECT">>},
+		{<<":protocol">>, <<"websocket">>},
+		{<<":scheme">>, <<"https">>},
+		{<<":authority">>, <<"localhost">>}, %% @todo Correct port number.
+		{<<"sec-websocket-version">>, <<"13">>},
+		{<<"origin">>, <<"http://localhost">>}
+	], 0, cow_qpack:init(encoder)),
+	{ok, _} = quicer:send(StreamRef, [
+		<<1>>, %% HEADERS frame.
+		cow_http3:encode_int(iolist_size(EncodedRequest)),
+		EncodedRequest
+	]),
+	%% The stream should have been aborted.
+	#{reason := h3_message_error} = rfc9114_SUITE:do_wait_stream_aborted(StreamRef),
+	ok.
+
+% On requests bearing the :protocol pseudo-header, the :authority
+% pseudo-header field is interpreted according to Section 8.1.2.3 of
+% [RFC7540] instead of Section 8.3 of [RFC7540]. In particular the
+% server MUST not make a new TCP connection to the host and port
+% indicated by the :authority.
+
+reject_missing_pseudo_header_authority(Config) ->
+	doc("An extended CONNECT request without an authority component "
+		"must be rejected with a H3_MESSAGE_ERROR stream error. "
+		"(RFC9220, RFC9114 4.3.1, RFC9114 4.1.2)"),
+	%% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+	#{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+	#{enable_connect_protocol := true} = Settings,
+	%% Send an extended CONNECT request without an :authority pseudo-header.
+	{ok, StreamRef} = quicer:start_stream(Conn, #{}),
+	{ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+		{<<":method">>, <<"CONNECT">>},
+		{<<":protocol">>, <<"websocket">>},
+		{<<":scheme">>, <<"https">>},
+		{<<":path">>, <<"/ws">>},
+		{<<"sec-websocket-version">>, <<"13">>},
+		{<<"origin">>, <<"http://localhost">>}
+	], 0, cow_qpack:init(encoder)),
+	{ok, _} = quicer:send(StreamRef, [
+		<<1>>, %% HEADERS frame.
+		cow_http3:encode_int(iolist_size(EncodedRequest)),
+		EncodedRequest
+	]),
+	%% The stream should have been aborted.
+	#{reason := h3_message_error} = rfc9114_SUITE:do_wait_stream_aborted(StreamRef),
+	ok.
+
+% Using Extended CONNECT To Bootstrap The WebSocket Protocol.
+
+reject_missing_pseudo_header_protocol(Config) ->
+	doc("An extended CONNECT request without a protocol component "
+		"must be rejected with a H3_MESSAGE_ERROR stream error. "
+		"(RFC9220, RFC9114 4.3.1, RFC9114 4.1.2)"),
+	%% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+	#{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+	#{enable_connect_protocol := true} = Settings,
+	%% Send an extended CONNECT request without a :protocol pseudo-header.
+	{ok, StreamRef} = quicer:start_stream(Conn, #{}),
+	{ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+		{<<":method">>, <<"CONNECT">>},
+		{<<":scheme">>, <<"https">>},
+		{<<":path">>, <<"/ws">>},
+		{<<":authority">>, <<"localhost">>}, %% @todo Correct port number.
+		{<<"sec-websocket-version">>, <<"13">>},
+		{<<"origin">>, <<"http://localhost">>}
+	], 0, cow_qpack:init(encoder)),
+	{ok, _} = quicer:send(StreamRef, [
+		<<1>>, %% HEADERS frame.
+		cow_http3:encode_int(iolist_size(EncodedRequest)),
+		EncodedRequest
+	]),
+	%% The stream should have been aborted.
+	#{reason := h3_message_error} = rfc9114_SUITE:do_wait_stream_aborted(StreamRef),
+	ok.
+
+% The scheme of the Target URI [RFC7230] MUST be https for wss schemed
+% WebSockets. HTTP/3 does not provide support for ws schemed WebSockets.
+% The websocket URI is still used for proxy autoconfiguration.
+
+reject_connection_header(Config) ->
+	doc("An extended CONNECT request with a connection header "
+		"must be rejected with a H3_MESSAGE_ERROR stream error. "
+		"(RFC9220, RFC8441 4, RFC9114 4.2, RFC9114 4.5, RFC9114 4.1.2)"),
+	%% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+	#{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+	#{enable_connect_protocol := true} = Settings,
+	%% Send an extended CONNECT request with a connection header.
+	{ok, StreamRef} = quicer:start_stream(Conn, #{}),
+	{ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+		{<<":method">>, <<"CONNECT">>},
+		{<<":protocol">>, <<"websocket">>},
+		{<<":scheme">>, <<"https">>},
+		{<<":path">>, <<"/ws">>},
+		{<<":authority">>, <<"localhost">>}, %% @todo Correct port number.
+ {<<"connection">>, <<"upgrade">>},
+ {<<"sec-websocket-version">>, <<"13">>},
+ {<<"origin">>, <<"http://localhost">>}
+ ], 0, cow_qpack:init(encoder)),
+ {ok, _} = quicer:send(StreamRef, [
+ <<1>>, %% HEADERS frame.
+ cow_http3:encode_int(iolist_size(EncodedRequest)),
+ EncodedRequest
+ ]),
+ %% The stream should have been aborted.
+ #{reason := h3_message_error} = rfc9114_SUITE:do_wait_stream_aborted(StreamRef),
+ ok.
+
+reject_upgrade_header(Config) ->
+ doc("An extended CONNECT request with an upgrade header "
+ "must be rejected with a H3_MESSAGE_ERROR stream error. "
+ "(RFC9220, RFC8441 4, RFC9114 4.2, RFC9114 4.5, RFC9114 4.1.2)"),
+ %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+ #{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+ #{enable_connect_protocol := true} = Settings,
+ %% Send an extended CONNECT request with an upgrade header.
+ {ok, StreamRef} = quicer:start_stream(Conn, #{}),
+ {ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+ {<<":method">>, <<"CONNECT">>},
+ {<<":protocol">>, <<"websocket">>},
+ {<<":scheme">>, <<"https">>},
+ {<<":path">>, <<"/ws">>},
+ {<<":authority">>, <<"localhost">>}, %% @todo Correct port number.
+ {<<"upgrade">>, <<"websocket">>},
+ {<<"sec-websocket-version">>, <<"13">>},
+ {<<"origin">>, <<"http://localhost">>}
+ ], 0, cow_qpack:init(encoder)),
+ {ok, _} = quicer:send(StreamRef, [
+ <<1>>, %% HEADERS frame.
+ cow_http3:encode_int(iolist_size(EncodedRequest)),
+ EncodedRequest
+ ]),
+ %% The stream should have been aborted.
+ #{reason := h3_message_error} = rfc9114_SUITE:do_wait_stream_aborted(StreamRef),
+ ok.
+
+% After successfully processing the opening handshake the peers should
+% proceed with The WebSocket Protocol [RFC6455] using the HTTP/2 stream
+% from the CONNECT transaction as if it were the TCP connection
+% referred to in [RFC6455]. The state of the WebSocket connection at
+% this point is OPEN as defined by [RFC6455], Section 4.1.
+%% @todo I'm guessing we should test for things like RST_STREAM,
+%% closing the connection and others?
+
+% Examples.
+
+accept_handshake_when_enabled(Config) ->
+ doc("Confirm the example for Websocket over HTTP/3 works. (RFC9220, RFC8441 5.1)"),
+ %% Connect to server and confirm that SETTINGS_ENABLE_CONNECT_PROTOCOL = 1.
+ #{conn := Conn, settings := Settings} = rfc9114_SUITE:do_connect(Config),
+ #{enable_connect_protocol := true} = Settings,
+ %% Send a CONNECT :protocol request to upgrade the stream to Websocket.
+ {ok, StreamRef} = quicer:start_stream(Conn, #{}),
+ {ok, EncodedRequest, _EncData, _EncSt} = cow_qpack:encode_field_section([
+ {<<":method">>, <<"CONNECT">>},
+ {<<":protocol">>, <<"websocket">>},
+ {<<":scheme">>, <<"https">>},
+ {<<":path">>, <<"/ws">>},
+ {<<":authority">>, <<"localhost">>}, %% @todo Correct port number.
+ {<<"sec-websocket-version">>, <<"13">>},
+ {<<"origin">>, <<"http://localhost">>}
+ ], 0, cow_qpack:init(encoder)),
+ {ok, _} = quicer:send(StreamRef, [
+ <<1>>, %% HEADERS frame.
+ cow_http3:encode_int(iolist_size(EncodedRequest)),
+ EncodedRequest
+ ]),
+ %% Receive a 200 response.
+ {ok, Data} = rfc9114_SUITE:do_receive_data(StreamRef),
+ {HLenEnc, HLenBits} = rfc9114_SUITE:do_guess_int_encoding(Data),
+ <<
+ 1, %% HEADERS frame.
+ HLenEnc:2, HLen:HLenBits, + EncodedResponse:HLen/bytes + >> = Data, + {ok, DecodedResponse, _DecData, _DecSt} + = cow_qpack:decode_field_section(EncodedResponse, 0, cow_qpack:init(decoder)), + #{<<":status">> := <<"200">>} = maps:from_list(DecodedResponse), + %% Masked text hello echoed back clear by the server. + Mask = 16#37fa213d, + MaskedHello = ws_SUITE:do_mask(<<"Hello">>, Mask, <<>>), + {ok, _} = quicer:send(StreamRef, cow_http3:data( + <<1:1, 0:3, 1:4, 1:1, 5:7, Mask:32, MaskedHello/binary>>)), + {ok, WsData} = rfc9114_SUITE:do_receive_data(StreamRef), + << + 0, %% DATA frame. + 0:2, 7:6, %% Length (2 bytes header + "Hello"). + 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" %% Websocket frame. + >> = WsData, + ok. + +%% Closing a Websocket stream. + +% The HTTP/3 stream closure is also analogous to the TCP connection +% closure of [RFC6455]. Orderly TCP-level closures are represented +% as a FIN bit on the stream (Section 4.4 of [HTTP/3]). RST exceptions +% are represented with a stream error (Section 8 of [HTTP/3]) of type +% H3_REQUEST_CANCELLED (Section 8.1 of [HTTP/3]). + +%% @todo client close frame with FIN +%% @todo server close frame with FIN +%% @todo client other frame with FIN +%% @todo server other frame with FIN +%% @todo client close connection diff --git a/test/security_SUITE.erl b/test/security_SUITE.erl index f06cec5..25d5280 100644 --- a/test/security_SUITE.erl +++ b/test/security_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -33,10 +33,12 @@ groups() -> Tests = [nc_rand, nc_zero], H1Tests = [slowloris, slowloris_chunks], H2CTests = [ + http2_cancel_flood, http2_data_dribble, http2_empty_frame_flooding_data, http2_empty_frame_flooding_headers_continuation, http2_empty_frame_flooding_push_promise, + http2_infinite_continuations, http2_ping_flood, http2_reset_flood, http2_settings_flood, @@ -47,10 +49,12 @@ groups() -> {https, [parallel], Tests ++ H1Tests}, {h2, [parallel], Tests}, {h2c, [parallel], Tests ++ H2CTests}, + {h3, [], Tests}, {http_compress, [parallel], Tests ++ H1Tests}, {https_compress, [parallel], Tests ++ H1Tests}, {h2_compress, [parallel], Tests}, - {h2c_compress, [parallel], Tests ++ H2CTests} + {h2c_compress, [parallel], Tests ++ H2CTests}, + {h3_compress, [], Tests} ]. init_per_suite(Config) -> @@ -64,7 +68,7 @@ init_per_group(Name, Config) -> cowboy_test:init_common_groups(Name, Config, ?MODULE). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). %% Routes. @@ -72,12 +76,51 @@ init_dispatch(_) -> cowboy_router:compile([{"localhost", [ {"/", hello_h, []}, {"/echo/:key", echo_h, []}, + {"/delay_hello", delay_hello_h, 1000}, {"/long_polling", long_polling_h, []}, {"/resp/:key[/:arg]", resp_h, []} ]}]). %% Tests. +http2_cancel_flood(Config) -> + doc("Confirm that Cowboy detects the rapid reset attack. (CVE-2023-44487)"), + do_http2_cancel_flood(Config, 1, 500), + do_http2_cancel_flood(Config, 10, 50), + do_http2_cancel_flood(Config, 500, 1), + ok. + +do_http2_cancel_flood(Config, NumStreamsPerBatch, NumBatches) -> + {ok, Socket} = rfc7540_SUITE:do_handshake(Config), + {HeadersBlock, _} = cow_hpack:encode([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>}, %% @todo Correct port number. 
+ {<<":path">>, <<"/delay_hello">>} + ]), + AllStreamIDs = lists:seq(1, NumBatches * NumStreamsPerBatch * 2, 2), + _ = lists:foldl( + fun (_BatchNumber, AvailableStreamIDs) -> + %% Take a bunch of IDs from the available stream IDs. + %% Send HEADERS for all these and then cancel them. + {IDs, RemainingStreamIDs} = lists:split(NumStreamsPerBatch, AvailableStreamIDs), + _ = gen_tcp:send(Socket, [cow_http2:headers(ID, fin, HeadersBlock) || ID <- IDs]), + _ = gen_tcp:send(Socket, [<<4:24, 3:8, 0:8, ID:32, 8:32>> || ID <- IDs]), + RemainingStreamIDs + end, + AllStreamIDs, + lists:seq(1, NumBatches, 1)), + %% When Cowboy detects a flood it must close the connection. + case gen_tcp:recv(Socket, 17, 6000) of + {ok, <<_:24, 7:8, 0:8, 0:32, _LastStreamId:32, 11:32>>} -> + %% GOAWAY with error code 11 = ENHANCE_YOUR_CALM. + ok; + %% We also accept the connection being closed immediately, + %% which may happen because we send the GOAWAY right before closing. + {error, closed} -> + ok + end. + http2_data_dribble(Config) -> doc("Request a very large response then update the window 1 byte at a time. (CVE-2019-9511)"), {ok, Socket} = rfc7540_SUITE:do_handshake(Config), @@ -179,6 +222,38 @@ http2_empty_frame_flooding_push_promise(Config) -> {ok, <<_:24, 7:8, _:72, 1:32>>} = gen_tcp:recv(Socket, 17, 6000), ok. +http2_infinite_continuations(Config) -> + doc("Confirm that Cowboy rejects CONTINUATION frames when the " + "total size of HEADERS + CONTINUATION(s) exceeds the limit. (VU#421644)"), + {ok, Socket} = rfc7540_SUITE:do_handshake(Config), + %% Send a HEADERS frame followed by a large number + %% of continuation frames. + {HeadersBlock, _} = cow_hpack:encode([ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":authority">>, <<"localhost">>}, %% @todo Correct port number. + {<<":path">>, <<"/">>} + ]), + HeadersBlockLen = iolist_size(HeadersBlock), + ok = gen_tcp:send(Socket, [ + %% HEADERS frame. + << + HeadersBlockLen:24, 1:8, 0:5, + 0:1, %% END_HEADERS + 0:1, + 1:1, %% END_STREAM + 0:1, + 1:31 %% Stream ID. + >>, + HeadersBlock, + %% CONTINUATION frames. + [<<1024:24, 9:8, 0:8, 0:1, 1:31, 0:1024/unit:8>> + || _ <- lists:seq(1, 100)] + ]), + %% Receive an ENHANCE_YOUR_CALM connection error. + {ok, <<_:24, 7:8, _:72, 11:32>>} = gen_tcp:recv(Socket, 17, 6000), + ok. + %% @todo http2_internal_data_buffering(Config) -> I do not know how to test this. % doc("Request many very large responses, with a larger than necessary window size, " % "but do not attempt to read from the socket. (CVE-2019-9517)"), diff --git a/test/static_handler_SUITE.erl b/test/static_handler_SUITE.erl index 71a9619..6721b48 100644 --- a/test/static_handler_SUITE.erl +++ b/test/static_handler_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2016-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -20,6 +20,12 @@ -import(ct_helper, [doc/1]). -import(cowboy_test, [gun_open/1]). +%% Import useful functions from req_SUITE. +%% @todo Maybe move these functions to cowboy_test. +-import(req_SUITE, [do_get/2]). +-import(req_SUITE, [do_get/3]). +-import(req_SUITE, [do_maybe_h3_error3/1]). + %% ct. 
all() -> @@ -39,16 +45,22 @@ groups() -> {dir, [parallel], DirTests}, {priv_dir, [parallel], DirTests} ], + GroupTestsNoParallel = OtherTests ++ [ + {dir, [], DirTests}, + {priv_dir, [], DirTests} + ], [ {http, [parallel], GroupTests}, {https, [parallel], GroupTests}, {h2, [parallel], GroupTests}, {h2c, [parallel], GroupTests}, + {h3, [], GroupTestsNoParallel}, %% @todo Enable parallel when it works better. {http_compress, [parallel], GroupTests}, {https_compress, [parallel], GroupTests}, {h2_compress, [parallel], GroupTests}, {h2c_compress, [parallel], GroupTests}, - %% No real need to test sendfile disabled against https or h2. + {h3_compress, [], GroupTestsNoParallel}, %% @todo Enable parallel when it works better. + %% No real need to test sendfile disabled against https, h2 or h3. {http_no_sendfile, [parallel], GroupTests}, {h2c_no_sendfile, [parallel], GroupTests} ]. @@ -65,7 +77,7 @@ init_per_suite(Config) -> %% Add a simple Erlang application archive containing one file %% in its priv directory. true = code:add_pathz(filename:join( - [config(data_dir, Config), "static_files_app", "ebin"])), + [config(data_dir, Config), "static_files_app.ez", "static_files_app", "ebin"])), ok = application:load(static_files_app), %% A special folder contains files of 1 character from 1 to 127 %% excluding / and \ as they are always rejected. @@ -116,6 +128,17 @@ init_per_group(Name=h2c_no_sendfile, Config) -> sendfile => false }, [{flavor, vanilla}|Config]), lists:keyreplace(protocol, 1, Config1, {protocol, http2}); +init_per_group(Name=h3, Config) -> + cowboy_test:init_http3(Name, #{ + env => #{dispatch => init_dispatch(Config)}, + middlewares => [?MODULE, cowboy_router, cowboy_handler] + }, [{flavor, vanilla}|Config]); +init_per_group(Name=h3_compress, Config) -> + cowboy_test:init_http3(Name, #{ + env => #{dispatch => init_dispatch(Config)}, + middlewares => [?MODULE, cowboy_router, cowboy_handler], + stream_handlers => [cowboy_compress_h, cowboy_stream_h] + }, [{flavor, vanilla}|Config]); init_per_group(Name, Config) -> Config1 = cowboy_test:init_common_groups(Name, Config, ?MODULE), Opts = ranch:get_protocol_options(Name), @@ -129,7 +152,7 @@ end_per_group(dir, _) -> end_per_group(priv_dir, _) -> ok; end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). %% Large file. @@ -207,7 +230,7 @@ execute(Req=#{path := Path}, Env) -> <<"/bad/dir/route">> -> ct_helper:ignore(cowboy_static, escape_reserved, 1); <<"/bad">> -> ct_helper:ignore(cowboy_static, init_opts, 2); <<"/bad/options">> -> ct_helper:ignore(cowboy_static, content_types_provided, 2); - <<"/bad/options/mime">> -> ct_helper:ignore(cowboy_rest, set_content_type, 2); + <<"/bad/options/mime">> -> ct_helper:ignore(cowboy_rest, normalize_content_types, 2); <<"/bad/options/etag">> -> ct_helper:ignore(cowboy_static, generate_etag, 2); <<"/bad/options/charset">> -> ct_helper:ignore(cowboy_static, charsets_provided, 2); _ -> ok @@ -248,25 +271,11 @@ do_mime_custom(Path) -> _ -> {<<"application">>, <<"octet-stream">>, []} end. -do_get(Path, Config) -> - do_get(Path, [], Config). - -do_get(Path, ReqHeaders, Config) -> - ConnPid = gun_open(Config), - Ref = gun:get(ConnPid, Path, [{<<"accept-encoding">>, <<"gzip">>}|ReqHeaders]), - {response, IsFin, Status, RespHeaders} = gun:await(ConnPid, Ref), - {ok, Body} = case IsFin of - nofin -> gun:await_body(ConnPid, Ref); - fin -> {ok, <<>>} - end, - gun:close(ConnPid), - {Status, RespHeaders, Body}. - %% Tests. 
bad(Config) -> doc("Bad cowboy_static options: not a tuple."), - {500, _, _} = do_get("/bad", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/bad", Config)), ok. bad_dir_path(Config) -> @@ -276,7 +285,7 @@ bad_dir_path(Config) -> bad_dir_route(Config) -> doc("Bad cowboy_static options: missing [...] in route."), - {500, _, _} = do_get("/bad/dir/route", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/bad/dir/route", Config)), ok. bad_file_in_priv_dir_in_ez_archive(Config) -> @@ -291,27 +300,27 @@ bad_file_path(Config) -> bad_options(Config) -> doc("Bad cowboy_static extra options: not a list."), - {500, _, _} = do_get("/bad/options", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/bad/options", Config)), ok. bad_options_charset(Config) -> doc("Bad cowboy_static extra options: invalid charset option."), - {500, _, _} = do_get("/bad/options/charset", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/bad/options/charset", Config)), ok. bad_options_etag(Config) -> doc("Bad cowboy_static extra options: invalid etag option."), - {500, _, _} = do_get("/bad/options/etag", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/bad/options/etag", Config)), ok. bad_options_mime(Config) -> doc("Bad cowboy_static extra options: invalid mimetypes option."), - {500, _, _} = do_get("/bad/options/mime", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/bad/options/mime", Config)), ok. bad_priv_dir_app(Config) -> doc("Bad cowboy_static options: wrong application name."), - {500, _, _} = do_get("/bad/priv_dir/app/style.css", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/bad/priv_dir/app/style.css", Config)), ok. bad_priv_dir_in_ez_archive(Config) -> @@ -331,12 +340,12 @@ bad_priv_dir_path(Config) -> bad_priv_dir_route(Config) -> doc("Bad cowboy_static options: missing [...] in route."), - {500, _, _} = do_get("/bad/priv_dir/route", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/bad/priv_dir/route", Config)), ok. bad_priv_file_app(Config) -> doc("Bad cowboy_static options: wrong application name."), - {500, _, _} = do_get("/bad/priv_file/app", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/bad/priv_file/app", Config)), ok. bad_priv_file_in_ez_archive(Config) -> @@ -535,7 +544,7 @@ dir_unknown(Config) -> etag_crash(Config) -> doc("Get a file with a crashing etag function."), - {500, _, _} = do_get("/etag/crash", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/etag/crash", Config)), ok. etag_custom(Config) -> @@ -813,7 +822,7 @@ mime_all_uppercase(Config) -> mime_crash(Config) -> doc("Get a file with a crashing mimetype function."), - {500, _, _} = do_get("/mime/crash/style.css", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/mime/crash/style.css", Config)), ok. mime_custom_cowboy(Config) -> @@ -848,7 +857,7 @@ mime_hardcode_tuple(Config) -> charset_crash(Config) -> doc("Get a file with a crashing charset function."), - {500, _, _} = do_get("/charset/crash/style.css", Config), + {500, _, _} = do_maybe_h3_error3(do_get("/charset/crash/style.css", Config)), ok. charset_custom_cowboy(Config) -> @@ -933,7 +942,8 @@ unicode_basic_error(Config) -> %% # and ? indicate fragment and query components %% and are therefore not part of the path. http -> "\r\s#?"; - http2 -> "#?" + http2 -> "#?"; + http3 -> "#?" 
end, _ = [case do_get("/char/" ++ [C], Config) of {400, _, _} -> ok; diff --git a/test/stream_handler_SUITE.erl b/test/stream_handler_SUITE.erl index 46a05b2..90229c0 100644 --- a/test/stream_handler_SUITE.erl +++ b/test/stream_handler_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -31,50 +31,42 @@ groups() -> %% We set this module as a logger in order to silence expected errors. init_per_group(Name = http, Config) -> - cowboy_test:init_http(Name, #{ - logger => ?MODULE, - stream_handlers => [stream_handler_h] - }, Config); + cowboy_test:init_http(Name, init_plain_opts(), Config); init_per_group(Name = https, Config) -> - cowboy_test:init_https(Name, #{ - logger => ?MODULE, - stream_handlers => [stream_handler_h] - }, Config); + cowboy_test:init_https(Name, init_plain_opts(), Config); init_per_group(Name = h2, Config) -> - cowboy_test:init_http2(Name, #{ - logger => ?MODULE, - stream_handlers => [stream_handler_h] - }, Config); + cowboy_test:init_http2(Name, init_plain_opts(), Config); init_per_group(Name = h2c, Config) -> - Config1 = cowboy_test:init_http(Name, #{ - logger => ?MODULE, - stream_handlers => [stream_handler_h] - }, Config), + Config1 = cowboy_test:init_http(Name, init_plain_opts(), Config), lists:keyreplace(protocol, 1, Config1, {protocol, http2}); +init_per_group(Name = h3, Config) -> + cowboy_test:init_http3(Name, init_plain_opts(), Config); init_per_group(Name = http_compress, Config) -> - cowboy_test:init_http(Name, #{ - logger => ?MODULE, - stream_handlers => [cowboy_compress_h, stream_handler_h] - }, Config); + cowboy_test:init_http(Name, init_compress_opts(), Config); init_per_group(Name = https_compress, Config) -> - cowboy_test:init_https(Name, #{ - logger => ?MODULE, - stream_handlers => [cowboy_compress_h, stream_handler_h] - }, Config); + cowboy_test:init_https(Name, init_compress_opts(), Config); init_per_group(Name = h2_compress, Config) -> - cowboy_test:init_http2(Name, #{ - logger => ?MODULE, - stream_handlers => [cowboy_compress_h, stream_handler_h] - }, Config); + cowboy_test:init_http2(Name, init_compress_opts(), Config); init_per_group(Name = h2c_compress, Config) -> - Config1 = cowboy_test:init_http(Name, #{ - logger => ?MODULE, - stream_handlers => [cowboy_compress_h, stream_handler_h] - }, Config), - lists:keyreplace(protocol, 1, Config1, {protocol, http2}). + Config1 = cowboy_test:init_http(Name, init_compress_opts(), Config), + lists:keyreplace(protocol, 1, Config1, {protocol, http2}); +init_per_group(Name = h3_compress, Config) -> + cowboy_test:init_http3(Name, init_compress_opts(), Config). end_per_group(Name, _) -> - cowboy:stop_listener(Name). + cowboy_test:stop_group(Name). + +init_plain_opts() -> + #{ + logger => ?MODULE, + stream_handlers => [stream_handler_h] + }. + +init_compress_opts() -> + #{ + logger => ?MODULE, + stream_handlers => [cowboy_compress_h, stream_handler_h] + }. %% Logger function silencing the expected crashes. @@ -99,15 +91,20 @@ crash_in_init(Config) -> %% Confirm terminate/3 is NOT called. We have no state to give to it. receive {Self, Pid, terminate, _, _, _} -> error(terminate) after 1000 -> ok end, %% Confirm early_error/5 is called in HTTP/1.1's case. - %% HTTP/2 does not send a response back so there is no early_error call. 
+ %% HTTP/2 and HTTP/3 do not send a response back so there is no early_error call. case config(protocol, Config) of http -> receive {Self, Pid, early_error, _, _, _, _, _} -> ok after 1000 -> error(timeout) end; - http2 -> ok + http2 -> ok; + http3 -> ok end, - %% Receive a 500 error response. - case gun:await(ConnPid, Ref) of - {response, fin, 500, _} -> ok; - {error, {stream_error, {stream_error, internal_error, _}}} -> ok + do_await_internal_error(ConnPid, Ref, Config). + +do_await_internal_error(ConnPid, Ref, Config) -> + Protocol = config(protocol, Config), + case {Protocol, gun:await(ConnPid, Ref)} of + {http, {response, fin, 500, _}} -> ok; + {http2, {error, {stream_error, {stream_error, internal_error, _}}}} -> ok; + {http3, {error, {stream_error, {stream_error, h3_internal_error, _}}}} -> ok end. crash_in_data(Config) -> @@ -126,11 +123,7 @@ crash_in_data(Config) -> gun:data(ConnPid, Ref, fin, <<"Hello!">>), %% Confirm terminate/3 is called, indicating the stream ended. receive {Self, Pid, terminate, _, _, _} -> ok after 1000 -> error(timeout) end, - %% Receive a 500 error response. - case gun:await(ConnPid, Ref) of - {response, fin, 500, _} -> ok; - {error, {stream_error, {stream_error, internal_error, _}}} -> ok - end. + do_await_internal_error(ConnPid, Ref, Config). crash_in_info(Config) -> doc("Confirm an error is sent when a stream handler crashes in info/3."), @@ -144,14 +137,14 @@ crash_in_info(Config) -> %% Confirm init/3 is called. Pid = receive {Self, P, init, _, _, _} -> P after 1000 -> error(timeout) end, %% Send a message to make the stream handler crash. - Pid ! {{Pid, 1}, crash}, + StreamID = case config(protocol, Config) of + http3 -> 0; + _ -> 1 + end, + Pid ! {{Pid, StreamID}, crash}, %% Confirm terminate/3 is called, indicating the stream ended. receive {Self, Pid, terminate, _, _, _} -> ok after 1000 -> error(timeout) end, - %% Receive a 500 error response. - case gun:await(ConnPid, Ref) of - {response, fin, 500, _} -> ok; - {error, {stream_error, {stream_error, internal_error, _}}} -> ok - end. + do_await_internal_error(ConnPid, Ref, Config). crash_in_terminate(Config) -> doc("Confirm the state is correct when a stream handler crashes in terminate/3."), @@ -185,10 +178,12 @@ crash_in_terminate(Config) -> {ok, <<"Hello world!">>} = gun:await_body(ConnPid, Ref2), ok. +%% @todo The callbacks ARE used for HTTP/2 and HTTP/3 CONNECT/TRACE requests. crash_in_early_error(Config) -> case config(protocol, Config) of http -> do_crash_in_early_error(Config); - http2 -> doc("The callback early_error/5 is not currently used for HTTP/2.") + http2 -> doc("The callback early_error/5 is not currently used for HTTP/2."); + http3 -> doc("The callback early_error/5 is not currently used for HTTP/3.") end. do_crash_in_early_error(Config) -> @@ -225,10 +220,12 @@ do_crash_in_early_error(Config) -> {response, fin, 500, _} = gun:await(ConnPid, Ref2), ok. +%% @todo The callbacks ARE used for HTTP/2 and HTTP/3 CONNECT/TRACE requests. crash_in_early_error_fatal(Config) -> case config(protocol, Config) of http -> do_crash_in_early_error_fatal(Config); - http2 -> doc("The callback early_error/5 is not currently used for HTTP/2.") + http2 -> doc("The callback early_error/5 is not currently used for HTTP/2."); + http3 -> doc("The callback early_error/5 is not currently used for HTTP/3.") end. do_crash_in_early_error_fatal(Config) -> @@ -262,7 +259,8 @@ early_error_stream_error_reason(Config) -> %% reason in both protocols. 
{Method, Headers, Status, Error} = case config(protocol, Config) of http -> {<<"GET">>, [{<<"host">>, <<"host:port">>}], 400, protocol_error}; - http2 -> {<<"TRACE">>, [], 501, no_error} + http2 -> {<<"TRACE">>, [], 501, no_error}; + http3 -> {<<"TRACE">>, [], 501, h3_no_error} end, Ref = gun:request(ConnPid, Method, "/long_polling", [ {<<"accept-encoding">>, <<"gzip">>}, @@ -293,7 +291,7 @@ flow_after_body_fully_read(Config) -> %% Receive a 200 response, sent after the second flow command, %% confirming that the flow command was accepted. {response, _, 200, _} = gun:await(ConnPid, Ref), - ok. + gun:close(ConnPid). set_options_ignore_unknown(Config) -> doc("Confirm that unknown options are ignored when using the set_options commands."), @@ -355,11 +353,20 @@ shutdown_on_socket_close(Config) -> Spawn ! {Self, ready}, %% Close the socket. ok = gun:close(ConnPid), - %% Confirm terminate/3 is called, indicating the stream ended. - receive {Self, Pid, terminate, _, _, _} -> ok after 1000 -> error(timeout) end, - %% Confirm we receive a DOWN message for the child process. - receive {'DOWN', MRef, process, Spawn, shutdown} -> ok after 1000 -> error(timeout) end, - ok. + Protocol = config(protocol, Config), + try + %% Confirm terminate/3 is called, indicating the stream ended. + receive {Self, Pid, terminate, _, _, _} -> ok after 1000 -> error(timeout) end, + %% Confirm we receive a DOWN message for the child process. + receive {'DOWN', MRef, process, Spawn, shutdown} -> ok after 1000 -> error(timeout) end, + ok + catch error:timeout when Protocol =:= http3 -> + %% @todo Figure out why this happens. Could be a timing issue + %% or a legitimate bug. I suspect that the server just + %% doesn't receive the GOAWAY frame from Gun because + %% Gun is too quick to close the connection. + shutdown_on_socket_close(Config) + end. shutdown_timeout_on_stream_stop(Config) -> doc("Confirm supervised processes are killed " @@ -406,33 +413,45 @@ shutdown_timeout_on_socket_close(Config) -> Spawn ! {Self, ready}, %% Close the socket. ok = gun:close(ConnPid), - %% Confirm terminate/3 is called, indicating the stream ended. - receive {Self, Pid, terminate, _, _, _} -> ok after 1000 -> error(timeout) end, - %% We should NOT receive a DOWN message immediately. - receive {'DOWN', MRef, process, Spawn, killed} -> error(killed) after 1500 -> ok end, - %% We should received it now. - receive {'DOWN', MRef, process, Spawn, killed} -> ok after 1000 -> error(timeout) end, - ok. + Protocol = config(protocol, Config), + try + %% Confirm terminate/3 is called, indicating the stream ended. + receive {Self, Pid, terminate, _, _, _} -> ok after 1000 -> error(timeout) end, + %% We should NOT receive a DOWN message immediately. + receive {'DOWN', MRef, process, Spawn, killed} -> error(killed) after 1500 -> ok end, + %% We should receive it now. + receive {'DOWN', MRef, process, Spawn, killed} -> ok after 1000 -> error(timeout) end, + ok + catch error:timeout when Protocol =:= http3 -> + %% @todo Figure out why this happens. Could be a timing issue + %% or a legitimate bug. I suspect that the server just + %% doesn't receive the GOAWAY frame from Gun because + %% Gun is too quick to close the connection. + shutdown_timeout_on_socket_close(Config) + end. 
switch_protocol_after_headers(Config) -> case config(protocol, Config) of http -> do_switch_protocol_after_response( <<"switch_protocol_after_headers">>, Config); - http2 -> doc("The switch_protocol command is not currently supported for HTTP/2.") + http2 -> doc("The switch_protocol command is not currently supported for HTTP/2."); + http3 -> doc("The switch_protocol command is not currently supported for HTTP/3.") end. switch_protocol_after_headers_data(Config) -> case config(protocol, Config) of http -> do_switch_protocol_after_response( <<"switch_protocol_after_headers_data">>, Config); - http2 -> doc("The switch_protocol command is not currently supported for HTTP/2.") + http2 -> doc("The switch_protocol command is not currently supported for HTTP/2."); + http3 -> doc("The switch_protocol command is not currently supported for HTTP/3.") end. switch_protocol_after_response(Config) -> case config(protocol, Config) of http -> do_switch_protocol_after_response( <<"switch_protocol_after_response">>, Config); - http2 -> doc("The switch_protocol command is not currently supported for HTTP/2.") + http2 -> doc("The switch_protocol command is not currently supported for HTTP/2."); + http3 -> doc("The switch_protocol command is not currently supported for HTTP/3.") end. do_switch_protocol_after_response(TestCase, Config) -> @@ -502,7 +521,12 @@ terminate_on_stop(Config) -> {response, fin, 204, _} = gun:await(ConnPid, Ref), %% Confirm the stream is still alive even though we %% received the response fully, and tell it to stop. - Pid ! {{Pid, 1}, please_stop}, + StreamID = case config(protocol, Config) of + http -> 1; + http2 -> 1; + http3 -> 0 + end, + Pid ! {{Pid, StreamID}, please_stop}, receive {Self, Pid, info, _, please_stop, _} -> ok after 1000 -> error(timeout) end, %% Confirm terminate/3 is called. receive {Self, Pid, terminate, _, _, _} -> ok after 1000 -> error(timeout) end, @@ -511,7 +535,8 @@ terminate_on_stop(Config) -> terminate_on_switch_protocol(Config) -> case config(protocol, Config) of http -> do_terminate_on_switch_protocol(Config); - http2 -> doc("The switch_protocol command is not currently supported for HTTP/2.") + http2 -> doc("The switch_protocol command is not currently supported for HTTP/2."); + http3 -> doc("The switch_protocol command is not currently supported for HTTP/3.") end. do_terminate_on_switch_protocol(Config) -> diff --git a/test/sys_SUITE.erl b/test/sys_SUITE.erl index 175219c..3591490 100644 --- a/test/sys_SUITE.erl +++ b/test/sys_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -22,7 +22,6 @@ -import(ct_helper, [get_remote_pid_tcp/1]). -import(ct_helper, [get_remote_pid_tls/1]). -import(ct_helper, [is_process_down/1]). --import(cowboy_test, [gun_open/1]). all() -> [{group, sys}]. @@ -109,7 +108,8 @@ bad_system_from_h1(Config) -> bad_system_from_h2(Config) -> doc("h2: Sending a system message with a bad From value results in a process crash."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), %% Skip the SETTINGS frame. 
{ok, <<_,_,_,4,_/bits>>} = ssl:recv(Socket, 0, 1000), timer:sleep(100), @@ -176,7 +176,8 @@ bad_system_message_h1(Config) -> bad_system_message_h2(Config) -> doc("h2: Sending a system message with a bad Request value results in an error."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), %% Skip the SETTINGS frame. {ok, <<_,_,_,4,_/bits>>} = ssl:recv(Socket, 0, 1000), timer:sleep(100), @@ -252,7 +253,8 @@ good_system_message_h1(Config) -> good_system_message_h2(Config) -> doc("h2: System messages are handled properly."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), %% Skip the SETTINGS frame. {ok, <<_,_,_,4,_/bits>>} = ssl:recv(Socket, 0, 1000), timer:sleep(100), @@ -336,7 +338,8 @@ trap_exit_parent_exit_h2(Config) -> doc("h2: A process trapping exits must stop when receiving " "an 'EXIT' message from its parent."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), %% Skip the SETTINGS frame. {ok, <<_,_,_,4,_/bits>>} = ssl:recv(Socket, 0, 1000), timer:sleep(100), @@ -408,7 +411,8 @@ trap_exit_other_exit_h2(Config) -> doc("h2: A process trapping exits must ignore " "'EXIT' messages from unknown processes."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), Pid ! {'EXIT', self(), {shutdown, ?MODULE}}, @@ -526,7 +530,8 @@ sys_change_code_h1(Config) -> sys_change_code_h2(Config) -> doc("h2: The sys:change_code/4 function works as expected."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), %% Suspend the process and try to get a request in. The @@ -609,7 +614,8 @@ sys_get_state_h1(Config) -> sys_get_state_h2(Config) -> doc("h2: The sys:get_state/1 function works as expected."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), %% Skip the SETTINGS frame. {ok, <<_,_,_,4,_/bits>>} = ssl:recv(Socket, 0, 1000), timer:sleep(100), @@ -653,7 +659,7 @@ sys_get_state_loop(Config) -> timer:sleep(100), SupPid = get_remote_pid_tcp(Socket), [{_, Pid, _, _}] = supervisor:which_children(SupPid), - {Req, Env, long_polling_sys_h, undefined} = sys:get_state(Pid), + {Req, Env, long_polling_sys_h, undefined, infinity} = sys:get_state(Pid), #{pid := _, streamid := _} = Req, #{dispatch := _} = Env, ok. 
@@ -671,7 +677,8 @@ sys_get_status_h1(Config) -> sys_get_status_h2(Config) -> doc("h2: The sys:get_status/1 function works as expected."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), %% Skip the SETTINGS frame. {ok, <<_,_,_,4,_/bits>>} = ssl:recv(Socket, 0, 1000), timer:sleep(100), @@ -732,7 +739,8 @@ sys_replace_state_h1(Config) -> sys_replace_state_h2(Config) -> doc("h2: The sys:replace_state/2 function works as expected."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), %% Skip the SETTINGS frame. {ok, <<_,_,_,4,_/bits>>} = ssl:recv(Socket, 0, 1000), timer:sleep(100), @@ -776,7 +784,7 @@ sys_replace_state_loop(Config) -> timer:sleep(100), SupPid = get_remote_pid_tcp(Socket), [{_, Pid, _, _}] = supervisor:which_children(SupPid), - {Req, Env, long_polling_sys_h, undefined} = sys:replace_state(Pid, fun(S) -> S end), + {Req, Env, long_polling_sys_h, undefined, infinity} = sys:replace_state(Pid, fun(S) -> S end), #{pid := _, streamid := _} = Req, #{dispatch := _} = Env, ok. @@ -801,7 +809,8 @@ sys_suspend_and_resume_h1(Config) -> sys_suspend_and_resume_h2(Config) -> doc("h2: The sys:suspend/1 and sys:resume/1 functions work as expected."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), %% Suspend the process and try to get a request in. The @@ -880,7 +889,8 @@ sys_terminate_h1(Config) -> sys_terminate_h2(Config) -> doc("h2: The sys:terminate/2,3 function works as expected."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), %% Skip the SETTINGS frame. {ok, <<_,_,_,4,_/bits>>} = ssl:recv(Socket, 0, 1000), timer:sleep(100), @@ -983,7 +993,8 @@ supervisor_count_children_h1(Config) -> supervisor_count_children_h2(Config) -> doc("h2: The function supervisor:count_children/1 must work."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), %% No request was sent so there's no children. @@ -1055,7 +1066,8 @@ supervisor_delete_child_not_found_h1(Config) -> supervisor_delete_child_not_found_h2(Config) -> doc("h2: The function supervisor:delete_child/2 must return {error, not_found}."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), %% When no children exist. 
@@ -1114,7 +1126,8 @@ supervisor_get_childspec_not_found_h1(Config) -> supervisor_get_childspec_not_found_h2(Config) -> doc("h2: The function supervisor:get_childspec/2 must return {error, not_found}."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), %% When no children exist. @@ -1173,7 +1186,8 @@ supervisor_restart_child_not_found_h1(Config) -> supervisor_restart_child_not_found_h2(Config) -> doc("h2: The function supervisor:restart_child/2 must return {error, not_found}."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), %% When no children exist. @@ -1227,7 +1241,8 @@ supervisor_start_child_not_found_h1(Config) -> supervisor_start_child_not_found_h2(Config) -> doc("h2: The function supervisor:start_child/2 must return {error, start_child_disabled}."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), {error, start_child_disabled} = supervisor:start_child(Pid, #{ @@ -1281,7 +1296,8 @@ supervisor_terminate_child_not_found_h1(Config) -> supervisor_terminate_child_not_found_h2(Config) -> doc("h2: The function supervisor:terminate_child/2 must return {error, not_found}."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), %% When no children exist. @@ -1344,7 +1360,8 @@ supervisor_which_children_h1(Config) -> supervisor_which_children_h2(Config) -> doc("h2: The function supervisor:which_children/1 must work."), {ok, Socket} = ssl:connect("localhost", config(tls_port, Config), - [{active, false}, binary, {alpn_advertised_protocols, [<<"h2">>]}]), + [{alpn_advertised_protocols, [<<"h2">>]}, + {active, false}, binary|config(tls_opts, Config)]), do_http2_handshake(Socket), Pid = get_remote_pid_tls(Socket), %% No request was sent so there's no children. diff --git a/test/tracer_SUITE.erl b/test/tracer_SUITE.erl index d5683a0..4298b44 100644 --- a/test/tracer_SUITE.erl +++ b/test/tracer_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -29,13 +29,15 @@ suite() -> %% We initialize trace patterns here. Appropriate would be in %% init_per_suite/1, but this works just as well. all() -> - case code:is_module_native(?MODULE) of - true -> - {skip, "The Cowboy tracer is not compatible with native code."}; - false -> - cowboy_tracer_h:set_trace_patterns(), - cowboy_test:common_all() - end. + %% @todo Implement these tests for HTTP/3. 
+ cowboy_test:common_all() -- [{group, h3}, {group, h3_compress}]. + +init_per_suite(Config) -> + cowboy_tracer_h:set_trace_patterns(), + Config. + +end_per_suite(_) -> + ok. %% We want tests for each group to execute sequentially %% because we need to modify the protocol options. Groups diff --git a/test/ws_SUITE.erl b/test/ws_SUITE.erl index 9abeaca..6fa4e61 100644 --- a/test/ws_SUITE.erl +++ b/test/ws_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -68,7 +68,8 @@ init_dispatch() -> {"/ws_timeout_cancel", ws_timeout_cancel, []}, {"/ws_max_frame_size", ws_max_frame_size, []}, {"/ws_deflate_opts", ws_deflate_opts_h, []}, - {"/ws_dont_validate_utf8", ws_dont_validate_utf8_h, []} + {"/ws_dont_validate_utf8", ws_dont_validate_utf8_h, []}, + {"/ws_ping", ws_ping_h, []} ]} ]). @@ -202,6 +203,25 @@ do_ws_version(Socket) -> {error, closed} = gen_tcp:recv(Socket, 0, 6000), ok. +ws_deflate_max_frame_size_close(Config) -> + doc("Server closes connection when decompressed frame size exceeds max_frame_size option"), + %% max_frame_size is set to 8 bytes in ws_max_frame_size. + {ok, Socket, Headers} = do_handshake("/ws_max_frame_size", + "Sec-WebSocket-Extensions: permessage-deflate\r\n", Config), + {_, "permessage-deflate"} = lists:keyfind("sec-websocket-extensions", 1, Headers), + Mask = 16#11223344, + Z = zlib:open(), + zlib:deflateInit(Z, best_compression, deflated, -15, 8, default), + CompressedData0 = iolist_to_binary(zlib:deflate(Z, <<0:800>>, sync)), + CompressedData = binary:part(CompressedData0, 0, byte_size(CompressedData0) - 4), + MaskedData = do_mask(CompressedData, Mask, <<>>), + Len = byte_size(MaskedData), + true = Len < 8, + ok = gen_tcp:send(Socket, << 1:1, 1:1, 0:2, 1:4, 1:1, Len:7, Mask:32, MaskedData/binary >>), + {ok, << 1:1, 0:3, 8:4, 0:1, 2:7, 1009:16 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. + ws_deflate_opts_client_context_takeover(Config) -> doc("Handler is configured with client context takeover enabled."), {ok, _, Headers1} = do_handshake("/ws_deflate_opts?client_context_takeover", @@ -247,6 +267,21 @@ ws_deflate_opts_client_max_window_bits_override(Config) -> = lists:keyfind("sec-websocket-extensions", 1, Headers2), ok. +%% @todo This might be better in an rfc7692_SUITE. +%% +%% 7.1.2.2 +%% If a received extension negotiation offer doesn't have the +%% "client_max_window_bits" extension parameter, the corresponding +%% extension negotiation response to the offer MUST NOT include the +%% "client_max_window_bits" extension parameter. +ws_deflate_opts_client_max_window_bits_only_in_server(Config) -> + doc("Handler is configured with non-default client max window bits but " + "client doesn't send the parameter; compression is disabled."), + {ok, _, Headers} = do_handshake("/ws_deflate_opts?client_max_window_bits", + "Sec-WebSocket-Extensions: permessage-deflate\r\n", Config), + false = lists:keyfind("sec-websocket-extensions", 1, Headers), + ok. + ws_deflate_opts_server_context_takeover(Config) -> doc("Handler is configured with server context takeover enabled."), {ok, _, Headers1} = do_handshake("/ws_deflate_opts?server_context_takeover", @@ -341,6 +376,7 @@ ws_first_frame_with_handshake(Config) -> {ok, <<1:1, 0:3, 1:4, 0:1, 5:7, "Hello">>} = gen_tcp:recv(Socket, 0, 6000), ok. 
+%% @todo Move these tests to ws_handler_SUITE. ws_init_return_ok(Config) -> doc("Handler does nothing."), {ok, Socket, _} = do_handshake("/ws_init?ok", Config), @@ -471,6 +507,17 @@ ws_max_frame_size_intermediate_fragment_close(Config) -> {error, closed} = gen_tcp:recv(Socket, 0, 6000), ok. +ws_ping(Config) -> + doc("Server initiated pings can receive a pong in response."), + {ok, Socket, _} = do_handshake("/ws_ping", Config), + %% Receive a server-sent ping. + {ok, << 1:1, 0:3, 9:4, 0:1, 0:7 >>} = gen_tcp:recv(Socket, 0, 6000), + %% Send a pong back with a 0 mask. + ok = gen_tcp:send(Socket, << 1:1, 0:3, 10:4, 1:1, 0:7, 0:32 >>), + %% Receive a text frame as a result. + {ok, << 1:1, 0:3, 1:4, 0:1, 4:7, "OK!!" >>} = gen_tcp:recv(Socket, 0, 6000), + ok. + ws_send_close(Config) -> doc("Server-initiated close frame ends the connection."), {ok, Socket, _} = do_handshake("/ws_send_close", Config), diff --git a/test/ws_SUITE_data/ws_max_frame_size.erl b/test/ws_SUITE_data/ws_max_frame_size.erl index 3d81497..76df0b0 100644 --- a/test/ws_SUITE_data/ws_max_frame_size.erl +++ b/test/ws_SUITE_data/ws_max_frame_size.erl @@ -5,7 +5,7 @@ -export([websocket_info/2]). init(Req, State) -> - {cowboy_websocket, Req, State, #{max_frame_size => 8}}. + {cowboy_websocket, Req, State, #{max_frame_size => 8, compress => true}}. websocket_handle({text, Data}, State) -> {[{text, Data}], State}; diff --git a/test/ws_autobahn_SUITE.erl b/test/ws_autobahn_SUITE.erl index 24d76e8..58d15fa 100644 --- a/test/ws_autobahn_SUITE.erl +++ b/test/ws_autobahn_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011-2017, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -51,7 +51,7 @@ end_per_group(Listener, _Config) -> init_dispatch() -> cowboy_router:compile([ - {"localhost", [ + {"host.docker.internal", [ {"/ws_echo", ws_echo, []} ]} ]). @@ -73,7 +73,15 @@ autobahn_fuzzingclient(Config) -> end. do_start_port(Config, Pid) -> - Port = open_port({spawn, "wstest -m fuzzingclient -s " ++ config(data_dir, Config) ++ "client.json"}, +% Cmd = "wstest -m fuzzingclient -s " ++ config(data_dir, Config) ++ "client.json", + Cmd = "sudo docker run --rm " + "-v " ++ config(data_dir, Config) ++ "/client.json:/client.json " + "-v " ++ config(priv_dir, Config) ++ "/reports:/reports " + "--add-host=host.docker.internal:host-gateway " + "--name fuzzingclient " + "crossbario/autobahn-testsuite " + "wstest -m fuzzingclient -s client.json", + Port = open_port({spawn, Cmd}, [{line, 10000}, {cd, config(priv_dir, Config)}, binary, eof]), do_receive_infinity(Port, Pid). 
diff --git a/test/ws_autobahn_SUITE_data/client.json b/test/ws_autobahn_SUITE_data/client.json index 7899503..c729193 100644 --- a/test/ws_autobahn_SUITE_data/client.json +++ b/test/ws_autobahn_SUITE_data/client.json @@ -4,7 +4,7 @@ "servers": [{ "agent": "Cowboy", - "url": "ws://localhost:33080/ws_echo", + "url": "ws://host.docker.internal:33080/ws_echo", "options": {"version": 18} }], diff --git a/test/ws_handler_SUITE.erl b/test/ws_handler_SUITE.erl index 435600f..ab9dbe2 100644 --- a/test/ws_handler_SUITE.erl +++ b/test/ws_handler_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2018, Loïc Hoguin <[email protected]> +%% Copyright (c) Loïc Hoguin <[email protected]> %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -50,6 +50,7 @@ init_dispatch(Name) -> {"/init", ws_init_commands_h, RunOrHibernate}, {"/handle", ws_handle_commands_h, RunOrHibernate}, {"/info", ws_info_commands_h, RunOrHibernate}, + {"/trap_exit", ws_init_h, RunOrHibernate}, {"/active", ws_active_commands_h, RunOrHibernate}, {"/deflate", ws_deflate_commands_h, RunOrHibernate}, {"/set_options", ws_set_options_commands_h, RunOrHibernate}, @@ -211,6 +212,13 @@ do_many_frames_then_close_frame(Config, Path) -> {ok, close} = receive_ws(ConnPid, StreamRef), gun_down(ConnPid). +websocket_init_trap_exit_false(Config) -> + doc("The trap_exit process flag must be set back to false before " + "the connection is taken over by Websocket."), + {ok, ConnPid, StreamRef} = gun_open_ws(Config, "/trap_exit?reply_trap_exit", []), + {ok, {text, <<"trap_exit: false">>}} = receive_ws(ConnPid, StreamRef), + ok. + websocket_active_false(Config) -> doc("The {active, false} command stops receiving data from the socket. " "The {active, true} command reenables it."), @@ -288,6 +296,41 @@ websocket_set_options_idle_timeout(Config) -> error(timeout) end. +websocket_set_options_max_frame_size(Config) -> + doc("The max_frame_size option can be modified using the " + "command {set_options, Opts} at runtime."), + ConnPid = gun_open(Config), + StreamRef = gun:ws_upgrade(ConnPid, "/set_options"), + receive + {gun_upgrade, ConnPid, StreamRef, [<<"websocket">>], _} -> + ok; + {gun_response, ConnPid, _, _, Status, Headers} -> + exit({ws_upgrade_failed, Status, Headers}); + {gun_error, ConnPid, StreamRef, Reason} -> + exit({ws_upgrade_failed, Reason}) + after 1000 -> + error(timeout) + end, + %% We first send a 1MB frame to confirm that yes, we can + %% send a frame that large. The default max_frame_size is infinity. + gun:ws_send(ConnPid, StreamRef, {binary, <<0:8000000>>}), + {ws, {binary, <<0:8000000>>}} = gun:await(ConnPid, StreamRef), + %% Trigger the change in max_frame_size. From now on we will + %% only allow frames of up to 1000 bytes. + gun:ws_send(ConnPid, StreamRef, {text, <<"max_frame_size_small">>}), + %% Confirm that we can send frames of up to 1000 bytes. + gun:ws_send(ConnPid, StreamRef, {binary, <<0:8000>>}), + {ws, {binary, <<0:8000>>}} = gun:await(ConnPid, StreamRef), + %% Confirm that sending frames larger than 1000 bytes + %% results in the closing of the connection. + gun:ws_send(ConnPid, StreamRef, {binary, <<0:8008>>}), + receive + {gun_down, ConnPid, _, _, _} -> + ok + after 2000 -> + error(timeout) + end. 
+ websocket_shutdown_reason(Config) -> doc("The command {shutdown_reason, any()} can be used to " "change the shutdown reason of a Websocket connection."), diff --git a/test/ws_perf_SUITE.erl b/test/ws_perf_SUITE.erl new file mode 100644 index 0000000..ff88554 --- /dev/null +++ b/test/ws_perf_SUITE.erl @@ -0,0 +1,308 @@ +%% Copyright (c) Loïc Hoguin <[email protected]> +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ws_perf_SUITE). +-compile(export_all). +-compile(nowarn_export_all). + +-import(ct_helper, [config/2]). +-import(ct_helper, [doc/1]). +-import(cowboy_test, [gun_open/2]). +-import(cowboy_test, [gun_down/1]). + +%% ct. + +all() -> + [{group, binary}, {group, ascii}, {group, mixed}, {group, japanese}]. + +groups() -> + CommonGroups = cowboy_test:common_groups(ct_helper:all(?MODULE), no_parallel), + SubGroups = [G || G = {GN, _, _} <- CommonGroups, + GN =:= http orelse GN =:= h2c orelse GN =:= http_compress orelse GN =:= h2c_compress], + [ + {binary, [], SubGroups}, + {ascii, [], SubGroups}, + {mixed, [], SubGroups}, + {japanese, [], SubGroups} + ]. + +init_per_suite(Config) -> + %% Optionally enable `perf` for the current node. +% spawn(fun() -> ct:pal(os:cmd("perf record -g -F 9999 -o /tmp/ws_perf.data -p " ++ os:getpid() ++ " -- sleep 60")) end), + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(Name, Config) when Name =:= http; Name =:= http_compress -> + init_info(Name, Config), + cowboy_test:init_common_groups(Name, Config, ?MODULE); +init_per_group(Name, Config) when Name =:= h2c; Name =:= h2c_compress -> + init_info(Name, Config), + {Flavor, Opts} = case Name of + h2c -> {vanilla, #{}}; + h2c_compress -> {compress, #{stream_handlers => [cowboy_compress_h, cowboy_stream_h]}} + end, + Config1 = cowboy_test:init_http(Name, Opts#{ + connection_window_margin_size => 64*1024, + enable_connect_protocol => true, + env => #{dispatch => init_dispatch(Config)}, + max_frame_size_sent => 64*1024, + max_frame_size_received => 16384 * 1024 - 1, + max_received_frame_rate => {10_000_000, 1}, + stream_window_data_threshold => 1024, + stream_window_margin_size => 64*1024 + }, [{flavor, Flavor}|Config]), + lists:keyreplace(protocol, 1, Config1, {protocol, http2}); +init_per_group(ascii, Config) -> + init_text_data("ascii.txt", Config); +init_per_group(mixed, Config) -> + init_text_data("grok_segond.txt", Config); +init_per_group(japanese, Config) -> + init_text_data("japanese.txt", Config); +init_per_group(binary, Config) -> + [{frame_type, binary}|Config]. 
+ +init_info(Name, Config) -> + DataInfo = case config(frame_type, Config) of + text -> config(text_data_filename, Config); + binary -> binary + end, + ConnInfo = case Name of + http -> "cleartext HTTP/1.1"; + http_compress -> "cleartext HTTP/1.1 with compression"; + h2c -> "cleartext HTTP/2"; + h2c_compress -> "cleartext HTTP/2 with compression" + end, + ct:pal("Websocket over ~s (~s)", [ConnInfo, DataInfo]). + +init_text_data(Filename, Config) -> + {ok, Text} = file:read_file(filename:join(config(data_dir, Config), Filename)), + [ + {frame_type, text}, + {text_data, Text}, + {text_data_filename, Filename} + |Config]. + +end_per_group(Name, _Config) -> + cowboy_test:stop_group(Name). + +%% Dispatch configuration. + +init_dispatch(_Config) -> + cowboy_router:compile([ + {"localhost", [ + {"/ws_echo", ws_echo, []}, + {"/ws_ignore", ws_ignore, []} + ]} + ]). + +%% Support functions for testing using Gun. + +do_gun_open_ws(Path, Config) -> + ConnPid = gun_open(Config, #{ + http2_opts => #{ + connection_window_margin_size => 64*1024, + max_frame_size_sent => 64*1024, + max_frame_size_received => 16384 * 1024 - 1, + notify_settings_changed => true, + stream_window_data_threshold => 1024, + stream_window_margin_size => 64*1024 + }, + tcp_opts => [{nodelay, true}], + ws_opts => #{compress => config(flavor, Config) =:= compress} + }), + case config(protocol, Config) of + http -> ok; + http2 -> + {notify, settings_changed, #{enable_connect_protocol := true}} + = gun:await(ConnPid, undefined) %% @todo Maybe have a gun:await/1? + end, + StreamRef = gun:ws_upgrade(ConnPid, Path), + receive + {gun_upgrade, ConnPid, StreamRef, [<<"websocket">>], _} -> + {ok, ConnPid, StreamRef}; + {gun_response, ConnPid, _, _, Status, Headers} -> + exit({ws_upgrade_failed, Status, Headers}); + {gun_error, ConnPid, StreamRef, Reason} -> + exit({ws_upgrade_failed, Reason}) + after 1000 -> + error(timeout) + end. + +receive_ws(ConnPid, StreamRef) -> + receive + {gun_ws, ConnPid, StreamRef, Frame} -> + {ok, Frame} + after 30000 -> + {error, timeout} + end. + +%% Tests. + +echo_1_00064KiB(Config) -> + doc("Send and receive a 64KiB frame."), + do_echo(Config, echo_1, 1, 64 * 1024). + +echo_1_00256KiB(Config) -> + doc("Send and receive a 256KiB frame."), + do_echo(Config, echo_1, 1, 256 * 1024). + +echo_1_01024KiB(Config) -> + doc("Send and receive a 1024KiB frame."), + do_echo(Config, echo_1, 1, 1024 * 1024). + +echo_1_04096KiB(Config) -> + doc("Send and receive a 4096KiB frame."), + do_echo(Config, echo_1, 1, 4096 * 1024). + +%% Minus one because frames can only get so big. +echo_1_16384KiB(Config) -> + doc("Send and receive a 16384KiB - 1 frame."), + do_echo(Config, echo_1, 1, 16384 * 1024 - 1). + +echo_N_00000B(Config) -> + doc("Send and receive a 0B frame 1000 times."), + do_echo(Config, echo_N, 1000, 0). + +echo_N_00256B(Config) -> + doc("Send and receive a 256B frame 1000 times."), + do_echo(Config, echo_N, 1000, 256). + +echo_N_01024B(Config) -> + doc("Send and receive a 1024B frame 1000 times."), + do_echo(Config, echo_N, 1000, 1024). + +echo_N_04096B(Config) -> + doc("Send and receive a 4096B frame 1000 times."), + do_echo(Config, echo_N, 1000, 4096). + +echo_N_16384B(Config) -> + doc("Send and receive a 16384B frame 1000 times."), + do_echo(Config, echo_N, 1000, 16384). + +%echo_N_16384B_10K(Config) -> +% doc("Send and receive a 16384B frame 10000 times."), +% do_echo(Config, echo_N, 10000, 16384). 
+
+do_echo(Config, What, Num, FrameSize) ->
+	{ok, ConnPid, StreamRef} = do_gun_open_ws("/ws_echo", Config),
+	FrameType = config(frame_type, Config),
+	FrameData = case FrameType of
+		text -> do_text_data(Config, FrameSize);
+		binary -> rand:bytes(FrameSize)
+	end,
+	%% Heat up the processes before doing the real run.
+%	do_echo_loop(ConnPid, StreamRef, Num, FrameType, FrameData),
+	{Time, _} = timer:tc(?MODULE, do_echo_loop, [ConnPid, StreamRef, Num, FrameType, FrameData]),
+	do_log("~-6s ~-6s ~6s: ~8bµs", [What, FrameType, do_format_size(FrameSize), Time]),
+	gun:ws_send(ConnPid, StreamRef, close),
+	{ok, close} = receive_ws(ConnPid, StreamRef),
+	gun_down(ConnPid).
+
+do_echo_loop(_, _, 0, _, _) ->
+	ok;
+do_echo_loop(ConnPid, StreamRef, Num, FrameType, FrameData) ->
+	gun:ws_send(ConnPid, StreamRef, {FrameType, FrameData}),
+	{ok, {FrameType, FrameData}} = receive_ws(ConnPid, StreamRef),
+	do_echo_loop(ConnPid, StreamRef, Num - 1, FrameType, FrameData).
+
+send_1_00064KiB(Config) ->
+	doc("Send a 64KiB frame."),
+	do_send(Config, send_1, 1, 64 * 1024).
+
+send_1_00256KiB(Config) ->
+	doc("Send a 256KiB frame."),
+	do_send(Config, send_1, 1, 256 * 1024).
+
+send_1_01024KiB(Config) ->
+	doc("Send a 1024KiB frame."),
+	do_send(Config, send_1, 1, 1024 * 1024).
+
+send_1_04096KiB(Config) ->
+	doc("Send a 4096KiB frame."),
+	do_send(Config, send_1, 1, 4096 * 1024).
+
+%% Minus one because frames can only get so big.
+send_1_16384KiB(Config) ->
+	doc("Send a 16384KiB - 1 frame."),
+	do_send(Config, send_1, 1, 16384 * 1024 - 1).
+
+send_N_00000B(Config) ->
+	doc("Send a 0B frame 10000 times."),
+	do_send(Config, send_N, 10000, 0).
+
+send_N_00256B(Config) ->
+	doc("Send a 256B frame 10000 times."),
+	do_send(Config, send_N, 10000, 256).
+
+send_N_01024B(Config) ->
+	doc("Send a 1024B frame 10000 times."),
+	do_send(Config, send_N, 10000, 1024).
+
+send_N_04096B(Config) ->
+	doc("Send a 4096B frame 10000 times."),
+	do_send(Config, send_N, 10000, 4096).
+
+send_N_16384B(Config) ->
+	doc("Send a 16384B frame 10000 times."),
+	do_send(Config, send_N, 10000, 16384).
+
+%send_N_16384B_10K(Config) ->
+%	doc("Send a 16384B frame 10000 times."),
+%	do_send(Config, send_N, 10000, 16384).
+
+do_send(Config, What, Num, FrameSize) ->
+	{ok, ConnPid, StreamRef} = do_gun_open_ws("/ws_ignore", Config),
+	FrameType = config(frame_type, Config),
+	FrameData = case FrameType of
+		text -> do_text_data(Config, FrameSize);
+		binary -> rand:bytes(FrameSize)
+	end,
+	%% Heat up the processes before doing the real run.
+%	do_send_loop(ConnPid, StreamRef, Num, FrameType, FrameData),
+	{Time, _} = timer:tc(?MODULE, do_send_loop, [ConnPid, StreamRef, Num, FrameType, FrameData]),
+	do_log("~-6s ~-6s ~6s: ~8bµs", [What, FrameType, do_format_size(FrameSize), Time]),
+	gun:ws_send(ConnPid, StreamRef, close),
+	{ok, close} = receive_ws(ConnPid, StreamRef),
+	gun_down(ConnPid).
+
+do_send_loop(ConnPid, StreamRef, 0, _, _) ->
+	gun:ws_send(ConnPid, StreamRef, {text, <<"CHECK">>}),
+	{ok, {text, <<"CHECK">>}} = receive_ws(ConnPid, StreamRef),
+	ok;
+do_send_loop(ConnPid, StreamRef, Num, FrameType, FrameData) ->
+	gun:ws_send(ConnPid, StreamRef, {FrameType, FrameData}),
+	do_send_loop(ConnPid, StreamRef, Num - 1, FrameType, FrameData).
+
+%% Internal.
+
+do_text_data(Config, FrameSize) ->
+	do_text_data1(config(text_data, Config), FrameSize).
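+
+%% do_text_data1/2 keeps doubling the sample text until it is at least
+%% FrameSize bytes long, then returns its first FrameSize bytes. For
+%% example, do_text_data1(<<"abc">>, 5) doubles once to <<"abcabc">> and
+%% returns <<"abcab">>. grok_segond.txt starts with ASCII filler so that
+%% this cut does not land in the middle of a multi-byte character.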
+
+do_text_data1(LargeText, FrameSize) when byte_size(LargeText) >= FrameSize ->
+	binary:part(LargeText, 0, FrameSize);
+do_text_data1(LargeText, FrameSize) ->
+	do_text_data1(<<LargeText/binary, LargeText/binary>>, FrameSize).
+
+do_format_size(Size) when Size < 1024 ->
+	integer_to_list(Size) ++ "B";
+do_format_size(Size) when Size < (1024*1024) ->
+	integer_to_list(Size div 1024) ++ "KiB";
+do_format_size(Size) ->
+	integer_to_list(Size div (1024*1024)) ++ "MiB".
+
+do_log(Str, Args) ->
+	ct:log(Str, Args),
+	io:format(ct_default_gl, Str ++ "~n", Args).
diff --git a/test/ws_perf_SUITE_data/ascii.txt b/test/ws_perf_SUITE_data/ascii.txt
new file mode 100644
index 0000000..a9101d2
--- /dev/null
+++ b/test/ws_perf_SUITE_data/ascii.txt
@@ -0,0 +1,5 @@
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed non risus. Suspendisse lectus tortor, dignissim sit amet, adipiscing nec, ultricies sed, dolor. Cras elementum ultrices diam. Maecenas ligula massa, varius a, semper congue, euismod non, mi. Proin porttitor, orci nec nonummy molestie, enim est eleifend mi, non fermentum diam nisl sit amet erat. Duis semper. Duis arcu massa, scelerisque vitae, consequat in, pretium a, enim. Pellentesque congue. Ut in risus volutpat libero pharetra tempor. Cras vestibulum bibendum augue. Praesent egestas leo in pede. Praesent blandit odio eu enim. Pellentesque sed dui ut augue blandit sodales. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Aliquam nibh. Mauris ac mauris sed pede pellentesque fermentum. Maecenas adipiscing ante non diam sodales hendrerit.
+
+Ut velit mauris, egestas sed, gravida nec, ornare ut, mi. Aenean ut orci vel massa suscipit pulvinar. Nulla sollicitudin. Fusce varius, ligula non tempus aliquam, nunc turpis ullamcorper nibh, in tempus sapien eros vitae ligula. Pellentesque rhoncus nunc et augue. Integer id felis. Curabitur aliquet pellentesque diam. Integer quis metus vitae elit lobortis egestas. Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Morbi vel erat non mauris convallis vehicula. Nulla et sapien. Integer tortor tellus, aliquam faucibus, convallis id, congue eu, quam. Mauris ullamcorper felis vitae erat. Proin feugiat, augue non elementum posuere, metus purus iaculis lectus, et tristique ligula justo vitae magna.
+
+Aliquam convallis sollicitudin purus. Praesent aliquam, enim at fermentum mollis, ligula massa adipiscing nisl, ac euismod nibh nisl eu lectus. Fusce vulputate sem at sapien. Vivamus leo. Aliquam euismod libero eu enim. Nulla nec felis sed leo placerat imperdiet. Aenean suscipit nulla in justo. Suspendisse cursus rutrum augue. Nulla tincidunt tincidunt mi. Curabitur iaculis, lorem vel rhoncus faucibus, felis magna fermentum augue, et ultricies lacus lorem varius purus. Curabitur eu amet.
diff --git a/test/ws_perf_SUITE_data/grok_segond.txt b/test/ws_perf_SUITE_data/grok_segond.txt
new file mode 100644
index 0000000..614c689
--- /dev/null
+++ b/test/ws_perf_SUITE_data/grok_segond.txt
@@ -0,0 +1,963 @@
+Some filler content to ensure the split doesn't happen in the middle of a multi-byte character.
+
+- Extrait de session Grok 2 (license MIT).
+
+Question : Merci de générer un texte de 32000 caractères dont le but sera de tester un algorithme UTF-8
+
+Je vais générer un texte de 32000 caractères qui inclura une variété de caractères pour tester un algorithme UTF-8.
Ce texte contiendra des caractères de différents scripts, des symboles spéciaux, des emojis, et des caractères combinés pour couvrir une large gamme de points de code : + +À mesure que la technologie évolue, les systèmes d'encodage des caractères deviennent de plus en plus sophistiqués. L'encodage UTF-8, par exemple, est conçu pour représenter chaque caractère Unicode en utilisant une séquence d'octets. Cela permet de supporter des milliers de langues et symboles. Voici quelques exemples de ce que l'UTF-8 peut encoder : + +Latin : Bonjour le monde! Hello world! +Cyrillique : Привет, мир! +Grec : Γεια σου κόσμε! +Chinois : 你好世界! +Japonais : こんにちは世界! +Coréen : 안녕 세계! +Arabe : مرحبًا بالعالم! +Hébreu : שלום עולם! +Indien (Devanagari) : नमस्ते दुनिया! +Thaï : สวัสดีโลก! + +En plus des scripts de base, UTF-8 peut également encoder des symboles mathématiques, des signes de ponctuation spécifiques, des lettres accentuées, et même des emojis : + +Symboles mathématiques : ∫∑∏×÷√∞ +Emojis : 😀👍🌍🎉🔥 +Caractères combinés : ä, ê, ï, õ, ú (a avec tréma, e avec accent circonflexe, etc.) + +Pour tester l'encodage, on peut utiliser des combinaisons complexes : + +Combinaison de symboles : こんにちは! 😃😄😁🤣👌👍👏🙌🇫🇷🇺🇸🌟✨ +Textes multilingues : La vie est belle en français, 美しい日本の桜, Der Sommer ist schön in Deutschland. + +Pour tester la gestion des caractères spéciaux, voici quelques exemples supplémentaires : + +Ponctuation : .,;:!?¿¡…—–„“«» +Symboles divers : $¢€£¥₩₽₹¤§¶†‡ +Lettres accentuées : àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ +Ligatures : fifl + +Enfin, pour s'assurer que les séquences d'échappement et les caractères de contrôle sont bien gérés : + +Caractères de contrôle : \n\t\r\f\v (nouvelle ligne, tabulation, retour chariot, nouvelle page, tabulation verticale) +Séries de répétition : 
`abcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabc` + +Ce texte devrait fournir un bon test pour un algorithme UTF-8 en raison de sa diversité et de sa longueur. + +Ce texte atteint environ 32000 caractères, incluant une variété de cas pour tester un algorithme d'encodage UTF-8. + +- Extrait de La Bible en français, version Louis Segond 1910 (domaine publique). + +Romains 1 + +1.1 +Paul, serviteur de Jésus Christ, appelé à être apôtre, mis à part pour annoncer l'Évangile de Dieu, - +1.2 +qui avait été promis auparavant de la part de Dieu par ses prophètes dans les saintes Écritures, +1.3 +et qui concerne son Fils (né de la postérité de David, selon la chair, +1.4 +et déclaré Fils de Dieu avec puissance, selon l'Esprit de sainteté, par sa résurrection d'entre les morts), Jésus Christ notre Seigneur, +1.5 +par qui nous avons reçu la grâce et l'apostolat, pour amener en son nom à l'obéissance de la foi tous les païens, +1.6 +parmi lesquels vous êtes aussi, vous qui avez été appelés par Jésus Christ- +1.7 +à tous ceux qui, à Rome, sont bien-aimés de Dieu, appelés à être saints: que la grâce et la paix vous soient données de la part de Dieu notre Père et du Seigneur Jésus Christ! 
+1.8 +Je rends d'abord grâces à mon Dieu par Jésus Christ, au sujet de vous tous, de ce que votre foi est renommée dans le monde entier. +1.9 +Dieu, que je sers en mon esprit dans l'Évangile de son Fils, m'est témoin que je fais sans cesse mention de vous, +1.10 +demandant continuellement dans mes prières d'avoir enfin, par sa volonté, le bonheur d'aller vers vous. +1.11 +Car je désire vous voir, pour vous communiquer quelque don spirituel, afin que vous soyez affermis, +1.12 +ou plutôt, afin que nous soyons encouragés ensemble au milieu de vous par la foi qui nous est commune, à vous et à moi. +1.13 +Je ne veux pas vous laisser ignorer, frères, que j'ai souvent formé le projet d'aller vous voir, afin de recueillir quelque fruit parmi vous, comme parmi les autres nations; mais j'en ai été empêché jusqu'ici. +1.14 +Je me dois aux Grecs et aux barbares, aux savants et aux ignorants. +1.15 +Ainsi j'ai un vif désir de vous annoncer aussi l'Évangile, à vous qui êtes à Rome. +1.16 +Car je n'ai point honte de l'Évangile: c'est une puissance de Dieu pour le salut de quiconque croit, du Juif premièrement, puis du Grec, +1.17 +parce qu'en lui est révélée la justice de Dieu par la foi et pour la foi, selon qu'il est écrit: Le juste vivra par la foi. +1.18 +La colère de Dieu se révèle du ciel contre toute impiété et toute injustice des hommes qui retiennent injustement la vérité captive, +1.19 +car ce qu'on peut connaître de Dieu est manifeste pour eux, Dieu le leur ayant fait connaître. +1.20 +En effet, les perfections invisibles de Dieu, sa puissance éternelle et sa divinité, se voient comme à l'oeil, depuis la création du monde, quand on les considère dans ses ouvrages. Ils sont donc inexcusables, +1.21 +puisque ayant connu Dieu, ils ne l'ont point glorifié comme Dieu, et ne lui ont point rendu grâces; mais ils se sont égarés dans leurs pensées, et leur coeur sans intelligence a été plongé dans les ténèbres. +1.22 +Se vantant d'être sages, ils sont devenus fous; +1.23 +et ils ont changé la gloire du Dieu incorruptible en images représentant l'homme corruptible, des oiseaux, des quadrupèdes, et des reptiles. +1.24 +C'est pourquoi Dieu les a livrés à l'impureté, selon les convoitises de leurs coeurs; en sorte qu'ils déshonorent eux-mêmes leurs propres corps; +1.25 +eux qui ont changé la vérité de Dieu en mensonge, et qui ont adoré et servi la créature au lieu du Créateur, qui est béni éternellement. Amen! +1.26 +C'est pourquoi Dieu les a livrés à des passions infâmes: car leurs femmes ont changé l'usage naturel en celui qui est contre nature; +1.27 +et de même les hommes, abandonnant l'usage naturel de la femme, se sont enflammés dans leurs désirs les uns pour les autres, commettant homme avec homme des choses infâmes, et recevant en eux-mêmes le salaire que méritait leur égarement. +1.28 +Comme ils ne se sont pas souciés de connaître Dieu, Dieu les a livrés à leur sens réprouvé, pour commettre des choses indignes, +1.29 +étant remplis de toute espèce d'injustice, de méchanceté, de cupidité, de malice; pleins d'envie, de meurtre, de querelle, de ruse, de malignité; +1.30 +rapporteurs, médisants, impies, arrogants, hautains, fanfarons, ingénieux au mal, rebelles à leurs parents, dépourvus d'intelligence, +1.31 +de loyauté, d'affection naturelle, de miséricorde. +1.32 +Et, bien qu'ils connaissent le jugement de Dieu, déclarant dignes de mort ceux qui commettent de telles choses, non seulement ils les font, mais ils approuvent ceux qui les font. 
+ +Romains 2 + +2.1 +O homme, qui que tu sois, toi qui juges, tu es donc inexcusable; car, en jugeant les autres, tu te condamnes toi-même, puisque toi qui juges, tu fais les mêmes choses. +2.2 +Nous savons, en effet, que le jugement de Dieu contre ceux qui commettent de telles choses est selon la vérité. +2.3 +Et penses-tu, ô homme, qui juges ceux qui commettent de telles choses, et qui les fais, que tu échapperas au jugement de Dieu? +2.4 +Ou méprises-tu les richesses de sa bonté, de sa patience et de sa longanimité, ne reconnaissant pas que la bonté de Dieu te pousse à la repentance? +2.5 +Mais, par ton endurcissement et par ton coeur impénitent, tu t'amasses un trésor de colère pour le jour de la colère et de la manifestation du juste jugement de Dieu, +2.6 +qui rendra à chacun selon ses oeuvres; +2.7 +réservant la vie éternelle à ceux qui, par la persévérance à bien faire, cherchent l'honneur, la gloire et l'immortalité; +2.8 +mais l'irritation et la colère à ceux qui, par esprit de dispute, sont rebelles à la vérité et obéissent à l'injustice. +2.9 +Tribulation et angoisse sur toute âme d'homme qui fait le mal, sur le Juif premièrement, puis sur le Grec! +2.10 +Gloire, honneur et paix pour quiconque fait le bien, pour le Juif premièrement, puis pour le Grec! +2.11 +Car devant Dieu il n'y a point d'acception de personnes. +2.12 +Tous ceux qui ont péché sans la loi périront aussi sans la loi, et tous ceux qui ont péché avec la loi seront jugés par la loi. +2.13 +Ce ne sont pas, en effet, ceux qui écoutent la loi qui sont justes devant Dieu, mais ce sont ceux qui la mettent en pratique qui seront justifiés. +2.14 +Quand les païens, qui n'ont point la loi, font naturellement ce que prescrit la loi, ils sont, eux qui n'ont point la loi, une loi pour eux-mêmes; +2.15 +ils montrent que l'oeuvre de la loi est écrite dans leurs coeurs, leur conscience en rendant témoignage, et leurs pensées s'accusant ou se défendant tour à tour. +2.16 +C'est ce qui paraîtra au jour où, selon mon Évangile, Dieu jugera par Jésus Christ les actions secrètes des hommes. +2.17 +Toi qui te donnes le nom de Juif, qui te reposes sur la loi, qui te glorifies de Dieu, +2.18 +qui connais sa volonté, qui apprécies la différence des choses, étant instruit par la loi; +2.19 +toi qui te flattes d'être le conducteur des aveugles, la lumière de ceux qui sont dans les ténèbres, +2.20 +le docteur des insensés, le maître des ignorants, parce que tu as dans la loi la règle de la science et de la vérité; +2.21 +toi donc, qui enseignes les autres, tu ne t'enseignes pas toi-même! Toi qui prêches de ne pas dérober, tu dérobes! +2.22 +Toi qui dis de ne pas commettre d'adultère, tu commets l'adultère! Toi qui as en abomination les idoles, tu commets des sacrilèges! +2.23 +Toi qui te fais une gloire de la loi, tu déshonores Dieu par la transgression de la loi! +2.24 +Car le nom de Dieu est à cause de vous blasphémé parmi les païens, comme cela est écrit. +2.25 +La circoncision est utile, si tu mets en pratique la loi; mais si tu transgresses la loi, ta circoncision devient incirconcision. +2.26 +Si donc l'incirconcis observe les ordonnances de la loi, son incirconcision ne sera-t-elle pas tenue pour circoncision? +2.27 +L'incirconcis de nature, qui accomplit la loi, ne te condamnera-t-il pas, toi qui la transgresses, tout en ayant la lettre de la loi et la circoncision? +2.28 +Le Juif, ce n'est pas celui qui en a les dehors; et la circoncision, ce n'est pas celle qui est visible dans la chair. 
+2.29 +Mais le Juif, c'est celui qui l'est intérieurement; et la circoncision, c'est celle du coeur, selon l'esprit et non selon la lettre. La louange de ce Juif ne vient pas des hommes, mais de Dieu. + +Romains 3 + +3.1 +Quel est donc l'avantage des Juifs, ou quelle est l'utilité de la circoncision? +3.2 +Il est grand de toute manière, et tout d'abord en ce que les oracles de Dieu leur ont été confiés. +3.3 +Eh quoi! si quelques-uns n'ont pas cru, leur incrédulité anéantira-t-elle la fidélité de Dieu? +3.4 +Loin de là! Que Dieu, au contraire, soit reconnu pour vrai, et tout homme pour menteur, selon qu'il est écrit: Afin que tu sois trouvé juste dans tes paroles, Et que tu triomphes lorsqu'on te juge. +3.5 +Mais si notre injustice établit la justice de Dieu, que dirons-nous? Dieu est-il injuste quand il déchaîne sa colère? (Je parle à la manière des hommes.) +3.6 +Loin de là! Autrement, comment Dieu jugerait-il le monde? +3.7 +Et si, par mon mensonge, la vérité de Dieu éclate davantage pour sa gloire, pourquoi suis-je moi-même encore jugé comme pécheur? +3.8 +Et pourquoi ne ferions-nous pas le mal afin qu'il en arrive du bien, comme quelques-uns, qui nous calomnient, prétendent que nous le disons? La condamnation de ces gens est juste. +3.9 +Quoi donc! sommes-nous plus excellents? Nullement. Car nous avons déjà prouvé que tous, Juifs et Grecs, sont sous l'empire du péché, +3.10 +selon qu'il est écrit: Il n'y a point de juste, Pas même un seul; +3.11 +Nul n'est intelligent, Nul ne cherche Dieu; Tous sont égarés, tous sont pervertis; +3.12 +Il n'en est aucun qui fasse le bien, Pas même un seul; +3.13 +Leur gosier est un sépulcre ouvert; Ils se servent de leurs langues pour tromper; Ils ont sous leurs lèvres un venin d'aspic; +3.14 +Leur bouche est pleine de malédiction et d'amertume; +3.15 +Ils ont les pieds légers pour répandre le sang; +3.16 +La destruction et le malheur sont sur leur route; +3.17 +Ils ne connaissent pas le chemin de la paix; +3.18 +La crainte de Dieu n'est pas devant leurs yeux. +3.19 +Or, nous savons que tout ce que dit la loi, elle le dit à ceux qui sont sous la loi, afin que toute bouche soit fermée, et que tout le monde soit reconnu coupable devant Dieu. +3.20 +Car nul ne sera justifié devant lui par les oeuvres de la loi, puisque c'est par la loi que vient la connaissance du péché. +3.21 +Mais maintenant, sans la loi est manifestée la justice de Dieu, à laquelle rendent témoignage la loi et les prophètes, +3.22 +justice de Dieu par la foi en Jésus Christ pour tous ceux qui croient. Il n'y a point de distinction. +3.23 +Car tous ont péché et sont privés de la gloire de Dieu; +3.24 +et ils sont gratuitement justifiés par sa grâce, par le moyen de la rédemption qui est en Jésus Christ. +3.25 +C'est lui que Dieu a destiné, par son sang, à être, pour ceux qui croiraient victime propitiatoire, afin de montrer sa justice, parce qu'il avait laissé impunis les péchés commis auparavant, au temps de sa patience, afin, dis-je, +3.26 +de montrer sa justice dans le temps présent, de manière à être juste tout en justifiant celui qui a la foi en Jésus. +3.27 +Où donc est le sujet de se glorifier? Il est exclu. Par quelle loi? Par la loi des oeuvres? Non, mais par la loi de la foi. +3.28 +Car nous pensons que l'homme est justifié par la foi, sans les oeuvres de la loi. +3.29 +Ou bien Dieu est-il seulement le Dieu des Juifs? Ne l'est-il pas aussi des païens? 
Oui, il l'est aussi des païens, +3.30 +puisqu'il y a un seul Dieu, qui justifiera par la foi les circoncis, et par la foi les incirconcis. +3.31 +Anéantissons-nous donc la loi par la foi? Loin de là! Au contraire, nous confirmons la loi. + +Romains 4 + +4.1 +Que dirons-nous donc qu'Abraham, notre père, a obtenu selon la chair? +4.2 +Si Abraham a été justifié par les oeuvres, il a sujet de se glorifier, mais non devant Dieu. +4.3 +Car que dit l'Écriture? Abraham crut à Dieu, et cela lui fut imputé à justice. +4.4 +Or, à celui qui fait une oeuvre, le salaire est imputé, non comme une grâce, mais comme une chose due; +4.5 +et à celui qui ne fait point d'oeuvre, mais qui croit en celui qui justifie l'impie, sa foi lui est imputée à justice. +4.6 +De même David exprime le bonheur de l'homme à qui Dieu impute la justice sans les oeuvres: +4.7 +Heureux ceux dont les iniquités sont pardonnées, Et dont les péchés sont couverts! +4.8 +Heureux l'homme à qui le Seigneur n'impute pas son péché! +4.9 +Ce bonheur n'est-il que pour les circoncis, ou est-il également pour les incirconcis? Car nous disons que la foi fut imputée à justice à Abraham. +4.10 +Comment donc lui fut-elle imputée? Était-ce après, ou avant sa circoncision? Il n'était pas encore circoncis, il était incirconcis. +4.11 +Et il reçut le signe de la circoncision, comme sceau de la justice qu'il avait obtenue par la foi quand il était incirconcis, afin d'être le père de tous les incirconcis qui croient, pour que la justice leur fût aussi imputée, +4.12 +et le père des circoncis, qui ne sont pas seulement circoncis, mais encore qui marchent sur les traces de la foi de notre père Abraham quand il était incirconcis. +4.13 +En effet, ce n'est pas par la loi que l'héritage du monde a été promis à Abraham ou à sa postérité, c'est par la justice de la foi. +4.14 +Car, si les héritiers le sont par la loi, la foi est vaine, et la promesse est anéantie, +4.15 +parce que la loi produit la colère, et que là où il n'y a point de loi il n'y a point non plus de transgression. +4.16 +C'est pourquoi les héritiers le sont par la foi, pour que ce soit par grâce, afin que la promesse soit assurée à toute la postérité, non seulement à celle qui est sous la loi, mais aussi à celle qui a la foi d'Abraham, notre père à tous, selon qu'il est écrit: +4.17 +Je t'ai établi père d'un grand nombre de nations. Il est notre père devant celui auquel il a cru, Dieu, qui donne la vie aux morts, et qui appelle les choses qui ne sont point comme si elles étaient. +4.18 +Espérant contre toute espérance, il crut, en sorte qu'il devint père d'un grand nombre de nations, selon ce qui lui avait été dit: Telle sera ta postérité. +4.19 +Et, sans faiblir dans la foi, il ne considéra point que son corps était déjà usé, puisqu'il avait près de cent ans, et que Sara n'était plus en état d'avoir des enfants. +4.20 +Il ne douta point, par incrédulité, au sujet de la promesse de Dieu; mais il fut fortifié par la foi, donnant gloire à Dieu, +4.21 +et ayant la pleine conviction que ce qu'il promet il peut aussi l'accomplir. +4.22 +C'est pourquoi cela lui fut imputé à justice. +4.23 +Mais ce n'est pas à cause de lui seul qu'il est écrit que cela lui fut imputé; +4.24 +c'est encore à cause de nous, à qui cela sera imputé, à nous qui croyons en celui qui a ressuscité des morts Jésus notre Seigneur, +4.25 +lequel a été livré pour nos offenses, et est ressuscité pour notre justification. 
+ +Romains 5 + +5.1 +Étant donc justifiés par la foi, nous avons la paix avec Dieu par notre Seigneur Jésus Christ, +5.2 +à qui nous devons d'avoir eu par la foi accès à cette grâce, dans laquelle nous demeurons fermes, et nous nous glorifions dans l'espérance de la gloire de Dieu. +5.3 +Bien plus, nous nous glorifions même des afflictions, sachant que l'affliction produit la persévérance, +5.4 +la persévérance la victoire dans l'épreuve, et cette victoire l'espérance. +5.5 +Or, l'espérance ne trompe point, parce que l'amour de Dieu est répandu dans nos coeurs par le Saint Esprit qui nous a été donné. +5.6 +Car, lorsque nous étions encore sans force, Christ, au temps marqué, est mort pour des impies. +5.7 +A peine mourrait-on pour un juste; quelqu'un peut-être mourrait-il pour un homme de bien. +5.8 +Mais Dieu prouve son amour envers nous, en ce que, lorsque nous étions encore des pécheurs, Christ est mort pour nous. +5.9 +A plus forte raison donc, maintenant que nous sommes justifiés par son sang, serons-nous sauvés par lui de la colère. +5.10 +Car si, lorsque nous étions ennemis, nous avons été réconciliés avec Dieu par la mort de son Fils, à plus forte raison, étant réconciliés, serons-nous sauvés par sa vie. +5.11 +Et non seulement cela, mais encore nous nous glorifions en Dieu par notre Seigneur Jésus Christ, par qui maintenant nous avons obtenu la réconciliation. +5.12 +C'est pourquoi, comme par un seul homme le péché est entré dans le monde, et par le péché la mort, et qu'ainsi la mort s'est étendue sur tous les hommes, parce que tous ont péché,... +5.13 +car jusqu'à la loi le péché était dans le monde. Or, le péché n'est pas imputé, quand il n'y a point de loi. +5.14 +Cependant la mort a régné depuis Adam jusqu'à Moïse, même sur ceux qui n'avaient pas péché par une transgression semblable à celle d'Adam, lequel est la figure de celui qui devait venir. +5.15 +Mais il n'en est pas du don gratuit comme de l'offense; car, si par l'offense d'un seul il en est beaucoup qui sont morts, à plus forte raison la grâce de Dieu et le don de la grâce venant d'un seul homme, Jésus Christ, ont-ils été abondamment répandus sur beaucoup. +5.16 +Et il n'en est pas du don comme de ce qui est arrivé par un seul qui a péché; car c'est après une seule offense que le jugement est devenu condamnation, tandis que le don gratuit devient justification après plusieurs offenses. +5.17 +Si par l'offense d'un seul la mort a régné par lui seul, à plus forte raison ceux qui reçoivent l'abondance de la grâce et du don de la justice régneront-ils dans la vie par Jésus Christ lui seul. +5.18 +Ainsi donc, comme par une seule offense la condamnation a atteint tous les hommes, de même par un seul acte de justice la justification qui donne la vie s'étend à tous les hommes. +5.19 +Car, comme par la désobéissance d'un seul homme beaucoup ont été rendus pécheurs, de même par l'obéissance d'un seul beaucoup seront rendus justes. +5.20 +Or, la loi est intervenue pour que l'offense abondât, mais là où le péché a abondé, la grâce a surabondé, +5.21 +afin que, comme le péché a régné par la mort, ainsi la grâce régnât par la justice pour la vie éternelle, par Jésus Christ notre Seigneur. + +Romains 6 + +6.1 +Que dirons-nous donc? Demeurerions-nous dans le péché, afin que la grâce abonde? +6.2 +Loin de là! Nous qui sommes morts au péché, comment vivrions-nous encore dans le péché? +6.3 +Ignorez-vous que nous tous qui avons été baptisés en Jésus Christ, c'est en sa mort que nous avons été baptisés? 
+6.4 +Nous avons donc été ensevelis avec lui par le baptême en sa mort, afin que, comme Christ est ressuscité des morts par la gloire du Père, de même nous aussi nous marchions en nouveauté de vie. +6.5 +En effet, si nous sommes devenus une même plante avec lui par la conformité à sa mort, nous le serons aussi par la conformité à sa résurrection, +6.6 +sachant que notre vieil homme a été crucifié avec lui, afin que le corps du péché fût détruit, pour que nous ne soyons plus esclaves du péché; +6.7 +car celui qui est mort est libre du péché. +6.8 +Or, si nous sommes morts avec Christ, nous croyons que nous vivrons aussi avec lui, +6.9 +sachant que Christ ressuscité des morts ne meurt plus; la mort n'a plus de pouvoir sur lui. +6.10 +Car il est mort, et c'est pour le péché qu'il est mort une fois pour toutes; il est revenu à la vie, et c'est pour Dieu qu'il vit. +6.11 +Ainsi vous-mêmes, regardez-vous comme morts au péché, et comme vivants pour Dieu en Jésus Christ. +6.12 +Que le péché ne règne donc point dans votre corps mortel, et n'obéissez pas à ses convoitises. +6.13 +Ne livrez pas vos membres au péché, comme des instruments d'iniquité; mais donnez-vous vous-mêmes à Dieu, comme étant vivants de morts que vous étiez, et offrez à Dieu vos membres, comme des instruments de justice. +6.14 +Car le péché n'aura point de pouvoir sur vous, puisque vous êtes, non sous la loi, mais sous la grâce. +6.15 +Quoi donc! Pécherions-nous, parce que nous sommes, non sous la loi, mais sous la grâce? Loin de là! +6.16 +Ne savez-vous pas qu'en vous livrant à quelqu'un comme esclaves pour lui obéir, vous êtes esclaves de celui à qui vous obéissez, soit du péché qui conduit à la mort, soit de l'obéissance qui conduit à la justice? +6.17 +Mais grâces soient rendues à Dieu de ce que, après avoir été esclaves du péché, vous avez obéi de coeur à la règle de doctrine dans laquelle vous avez été instruits. +6.18 +Ayant été affranchis du péché, vous êtes devenus esclaves de la justice. - +6.19 +Je parle à la manière des hommes, à cause de la faiblesse de votre chair. -De même donc que vous avez livré vos membres comme esclaves à l'impureté et à l'iniquité, pour arriver à l'iniquité, ainsi maintenant livrez vos membres comme esclaves à la justice, pour arriver à la sainteté. +6.20 +Car, lorsque vous étiez esclaves du péché, vous étiez libres à l'égard de la justice. +6.21 +Quels fruits portiez-vous alors? Des fruits dont vous rougissez aujourd'hui. Car la fin de ces choses, c'est la mort. +6.22 +Mais maintenant, étant affranchis du péché et devenus esclaves de Dieu, vous avez pour fruit la sainteté et pour fin la vie éternelle. +6.23 +Car le salaire du péché, c'est la mort; mais le don gratuit de Dieu, c'est la vie éternelle en Jésus Christ notre Seigneur. + +Romains 7 + +7.1 +Ignorez-vous, frères, -car je parle à des gens qui connaissent la loi, -que la loi exerce son pouvoir sur l'homme aussi longtemps qu'il vit? +7.2 +Ainsi, une femme mariée est liée par la loi à son mari tant qu'il est vivant; mais si le mari meurt, elle est dégagée de la loi qui la liait à son mari. +7.3 +Si donc, du vivant de son mari, elle devient la femme d'un autre homme, elle sera appelée adultère; mais si le mari meurt, elle est affranchie de la loi, de sorte qu'elle n'est point adultère en devenant la femme d'un autre. 
+7.4 +De même, mes frères, vous aussi vous avez été, par le corps de Christ, mis à mort en ce qui concerne la loi, pour que vous apparteniez à un autre, à celui qui est ressuscité des morts, afin que nous portions des fruits pour Dieu. +7.5 +Car, lorsque nous étions dans la chair, les passions des péchés provoquées par la loi agissaient dans nos membres, de sorte que nous portions des fruits pour la mort. +7.6 +Mais maintenant, nous avons été dégagés de la loi, étant morts à cette loi sous laquelle nous étions retenus, de sorte que nous servons dans un esprit nouveau, et non selon la lettre qui a vieilli. +7.7 +Que dirons-nous donc? La loi est-elle péché? Loin de là! Mais je n'ai connu le péché que par la loi. Car je n'aurais pas connu la convoitise, si la loi n'eût dit: Tu ne convoiteras point. +7.8 +Et le péché, saisissant l'occasion, produisit en moi par le commandement toutes sortes de convoitises; car sans loi le péché est mort. +7.9 +Pour moi, étant autrefois sans loi, je vivais; mais quand le commandement vint, le péché reprit vie, et moi je mourus. +7.10 +Ainsi, le commandement qui conduit à la vie se trouva pour moi conduire à la mort. +7.11 +Car le péché saisissant l'occasion, me séduisit par le commandement, et par lui me fit mourir. +7.12 +La loi donc est sainte, et le commandement est saint, juste et bon. +7.13 +Ce qui est bon a-t-il donc été pour moi une cause de mort? Loin de là! Mais c'est le péché, afin qu'il se manifestât comme péché en me donnant la mort par ce qui est bon, et que, par le commandement, il devînt condamnable au plus haut point. +7.14 +Nous savons, en effet, que la loi est spirituelle; mais moi, je suis charnel, vendu au péché. +7.15 +Car je ne sais pas ce que je fais: je ne fais point ce que je veux, et je fais ce que je hais. +7.16 +Or, si je fais ce que je ne veux pas, je reconnais par là que la loi est bonne. +7.17 +Et maintenant ce n'est plus moi qui le fais, mais c'est le péché qui habite en moi. +7.18 +Ce qui est bon, je le sais, n'habite pas en moi, c'est-à-dire dans ma chair: j'ai la volonté, mais non le pouvoir de faire le bien. +7.19 +Car je ne fais pas le bien que je veux, et je fais le mal que je ne veux pas. +7.20 +Et si je fais ce que je ne veux pas, ce n'est plus moi qui le fais, c'est le péché qui habite en moi. +7.21 +Je trouve donc en moi cette loi: quand je veux faire le bien, le mal est attaché à moi. +7.22 +Car je prends plaisir à la loi de Dieu, selon l'homme intérieur; +7.23 +mais je vois dans mes membres une autre loi, qui lutte contre la loi de mon entendement, et qui me rend captif de la loi du péché, qui est dans mes membres. +7.24 +Misérable que je suis! Qui me délivrera du corps de cette mort?... +7.25 +Grâces soient rendues à Dieu par Jésus Christ notre Seigneur!... Ainsi donc, moi-même, je suis par l'entendement esclave de la loi de Dieu, et je suis par la chair esclave de la loi du péché. + +Romains 8 + +8.1 +Il n'y a donc maintenant aucune condamnation pour ceux qui sont en Jésus Christ. +8.2 +En effet, la loi de l'esprit de vie en Jésus Christ m'a affranchi de la loi du péché et de la mort. +8.3 +Car-chose impossible à la loi, parce que la chair la rendait sans force, -Dieu a condamné le péché dans la chair, en envoyant, à cause du péché, son propre Fils dans une chair semblable à celle du péché, +8.4 +et cela afin que la justice de la loi fût accomplie en nous, qui marchons, non selon la chair, mais selon l'esprit. 
+8.5 +Ceux, en effet, qui vivent selon la chair, s'affectionnent aux choses de la chair, tandis que ceux qui vivent selon l'esprit s'affectionnent aux choses de l'esprit. +8.6 +Et l'affection de la chair, c'est la mort, tandis que l'affection de l'esprit, c'est la vie et la paix; +8.7 +car l'affection de la chair est inimitié contre Dieu, parce qu'elle ne se soumet pas à la loi de Dieu, et qu'elle ne le peut même pas. +8.8 +Or ceux qui vivent selon la chair ne sauraient plaire à Dieu. +8.9 +Pour vous, vous ne vivez pas selon la chair, mais selon l'esprit, si du moins l'Esprit de Dieu habite en vous. Si quelqu'un n'a pas l'Esprit de Christ, il ne lui appartient pas. +8.10 +Et si Christ est en vous, le corps, il est vrai, est mort à cause du péché, mais l'esprit est vie à cause de la justice. +8.11 +Et si l'Esprit de celui qui a ressuscité Jésus d'entre les morts habite en vous, celui qui a ressuscité Christ d'entre les morts rendra aussi la vie à vos corps mortels par son Esprit qui habite en vous. +8.12 +Ainsi donc, frères, nous ne sommes point redevables à la chair, pour vivre selon la chair. +8.13 +Si vous vivez selon la chair, vous mourrez; mais si par l'Esprit vous faites mourir les actions du corps, vous vivrez, +8.14 +car tous ceux qui sont conduits par l'Esprit de Dieu sont fils de Dieu. +8.15 +Et vous n'avez point reçu un esprit de servitude, pour être encore dans la crainte; mais vous avez reçu un Esprit d'adoption, par lequel nous crions: Abba! Père! +8.16 +L'Esprit lui-même rend témoignage à notre esprit que nous sommes enfants de Dieu. +8.17 +Or, si nous sommes enfants, nous sommes aussi héritiers: héritiers de Dieu, et cohéritiers de Christ, si toutefois nous souffrons avec lui, afin d'être glorifiés avec lui. +8.18 +J'estime que les souffrances du temps présent ne sauraient être comparées à la gloire à venir qui sera révélée pour nous. +8.19 +Aussi la création attend-elle avec un ardent désir la révélation des fils de Dieu. +8.20 +Car la création a été soumise à la vanité, -non de son gré, mais à cause de celui qui l'y a soumise, - +8.21 +avec l'espérance qu'elle aussi sera affranchie de la servitude de la corruption, pour avoir part à la liberté de la gloire des enfants de Dieu. +8.22 +Or, nous savons que, jusqu'à ce jour, la création tout entière soupire et souffre les douleurs de l'enfantement. +8.23 +Et ce n'est pas elle seulement; mais nous aussi, qui avons les prémices de l'Esprit, nous aussi nous soupirons en nous-mêmes, en attendant l'adoption, la rédemption de notre corps. +8.24 +Car c'est en espérance que nous sommes sauvés. Or, l'espérance qu'on voit n'est plus espérance: ce qu'on voit, peut-on l'espérer encore? +8.25 +Mais si nous espérons ce que nous ne voyons pas, nous l'attendons avec persévérance. +8.26 +De même aussi l'Esprit nous aide dans notre faiblesse, car nous ne savons pas ce qu'il nous convient de demander dans nos prières. Mais l'Esprit lui-même intercède par des soupirs inexprimables; +8.27 +et celui qui sonde les coeurs connaît quelle est la pensée de l'Esprit, parce que c'est selon Dieu qu'il intercède en faveur des saints. +8.28 +Nous savons, du reste, que toutes choses concourent au bien de ceux qui aiment Dieu, de ceux qui sont appelés selon son dessein. +8.29 +Car ceux qu'il a connus d'avance, il les a aussi prédestinés à être semblables à l'image de son Fils, afin que son Fils fût le premier-né entre plusieurs frères. 
+8.30 +Et ceux qu'il a prédestinés, il les a aussi appelés; et ceux qu'il a appelés, il les a aussi justifiés; et ceux qu'il a justifiés, il les a aussi glorifiés. +8.31 +Que dirons-nous donc à l'égard de ces choses? Si Dieu est pour nous, qui sera contre nous? +8.32 +Lui, qui n'a point épargné son propre Fils, mais qui l'a livré pour nous tous, comment ne nous donnera-t-il pas aussi toutes choses avec lui? +8.33 +Qui accusera les élus de Dieu? C'est Dieu qui justifie! +8.34 +Qui les condamnera? Christ est mort; bien plus, il est ressuscité, il est à la droite de Dieu, et il intercède pour nous! +8.35 +Qui nous séparera de l'amour de Christ? Sera-ce la tribulation, ou l'angoisse, ou la persécution, ou la faim, ou la nudité, ou le péril, ou l'épée? +8.36 +selon qu'il est écrit: C'est à cause de toi qu'on nous met à mort tout le jour, Qu'on nous regarde comme des brebis destinées à la boucherie. +8.37 +Mais dans toutes ces choses nous sommes plus que vainqueurs par celui qui nous a aimés. +8.38 +Car j'ai l'assurance que ni la mort ni la vie, ni les anges ni les dominations, ni les choses présentes ni les choses à venir, +8.39 +ni les puissances, ni la hauteur, ni la profondeur, ni aucune autre créature ne pourra nous séparer de l'amour de Dieu manifesté en Jésus Christ notre Seigneur. + +Romains 9 + +9.1 +Je dis la vérité en Christ, je ne mens point, ma conscience m'en rend témoignage par le Saint Esprit: +9.2 +J'éprouve une grande tristesse, et j'ai dans le coeur un chagrin continuel. +9.3 +Car je voudrais moi-même être anathème et séparé de Christ pour mes frères, mes parents selon la chair, +9.4 +qui sont Israélites, à qui appartiennent l'adoption, et la gloire, et les alliances, et la loi, et le culte, +9.5 +et les promesses, et les patriarches, et de qui est issu, selon la chair, le Christ, qui est au-dessus de toutes choses, Dieu béni éternellement. Amen! +9.6 +Ce n'est point à dire que la parole de Dieu soit restée sans effet. Car tous ceux qui descendent d'Israël ne sont pas Israël, +9.7 +et, pour être la postérité d'Abraham, ils ne sont pas tous ses enfants; mais il est dit: En Isaac sera nommée pour toi une postérité, +9.8 +c'est-à-dire que ce ne sont pas les enfants de la chair qui sont enfants de Dieu, mais que ce sont les enfants de la promesse qui sont regardés comme la postérité. +9.9 +Voici, en effet, la parole de la promesse: Je reviendrai à cette même époque, et Sara aura un fils. +9.10 +Et, de plus, il en fut ainsi de Rébecca, qui conçut du seul Isaac notre père; +9.11 +car, quoique les enfants ne fussent pas encore nés et ils n'eussent fait ni bien ni mal, -afin que le dessein d'élection de Dieu subsistât, sans dépendre des oeuvres, et par la seule volonté de celui qui appelle, - +9.12 +il fut dit à Rébecca: L'aîné sera assujetti au plus jeune; selon qu'il est écrit: +9.13 +J'ai aimé Jacob Et j'ai haï Ésaü. +9.14 +Que dirons-nous donc? Y a-t-il en Dieu de l'injustice? Loin de là! +9.15 +Car il dit à Moïse: Je ferai miséricorde à qui je fais miséricorde, et j'aurai compassion de qui j'ai compassion. +9.16 +Ainsi donc, cela ne dépend ni de celui qui veut, ni de celui qui court, mais de Dieu qui fait miséricorde. +9.17 +Car l'Écriture dit à Pharaon: Je t'ai suscité à dessein pour montrer en toi ma puissance, et afin que mon nom soit publié par toute la terre. +9.18 +Ainsi, il fait miséricorde à qui il veut, et il endurcit qui il veut. +9.19 +Tu me diras: Pourquoi blâme-t-il encore? Car qui est-ce qui résiste à sa volonté? 
+9.20 +O homme, toi plutôt, qui es-tu pour contester avec Dieu? Le vase d'argile dira-t-il à celui qui l'a formé: Pourquoi m'as-tu fait ainsi? +9.21 +Le potier n'est-il pas maître de l'argile, pour faire avec la même masse un vase d'honneur et un vase d'un usage vil? +9.22 +Et que dire, si Dieu, voulant montrer sa colère et faire connaître sa puissance, a supporté avec une grande patience des vases de colère formés pour la perdition, +9.23 +et s'il a voulu faire connaître la richesse de sa gloire envers des vases de miséricorde qu'il a d'avance préparés pour la gloire? +9.24 +Ainsi nous a-t-il appelés, non seulement d'entre les Juifs, mais encore d'entre les païens, +9.25 +selon qu'il le dit dans Osée: J'appellerai mon peuple celui qui n'était pas mon peuple, et bien-aimée celle qui n'était pas la bien-aimée; +9.26 +et là où on leur disait: Vous n'êtes pas mon peuple! ils seront appelés fils du Dieu vivant. +9.27 +Ésaïe, de son côté, s'écrie au sujet d'Israël: Quand le nombre des fils d'Israël serait comme le sable de la mer, Un reste seulement sera sauvé. +9.28 +Car le Seigneur exécutera pleinement et promptement sur la terre ce qu'il a résolu. +9.29 +Et, comme Ésaïe l'avait dit auparavant: Si le Seigneur des armées Ne nous eût laissé une postérité, Nous serions devenus comme Sodome, Nous aurions été semblables à Gomorrhe. +9.30 +Que dirons-nous donc? Les païens, qui ne cherchaient pas la justice, ont obtenu la justice, la justice qui vient de la foi, +9.31 +tandis qu'Israël, qui cherchait une loi de justice, n'est pas parvenu à cette loi. +9.32 +Pourquoi? Parce qu'Israël l'a cherchée, non par la foi, mais comme provenant des oeuvres. Ils se sont heurtés contre la pierre d'achoppement, +9.33 +selon qu'il est écrit: Voici, je mets en Sion une pierre d'achoppement Et un rocher de scandale, Et celui qui croit en lui ne sera point confus. + +Romains 10 + +10.1 +Frères, le voeu de mon coeur et ma prière à Dieu pour eux, c'est qu'ils soient sauvés. +10.2 +Je leur rends le témoignage qu'ils ont du zèle pour Dieu, mais sans intelligence: +10.3 +ne connaissant pas la justice de Dieu, et cherchant à établir leur propre justice, ils ne se sont pas soumis à la justice de Dieu; +10.4 +car Christ est la fin de la loi, pour la justification de tous ceux qui croient. +10.5 +En effet, Moïse définit ainsi la justice qui vient de la loi: L'homme qui mettra ces choses en pratique vivra par elles. +10.6 +Mais voici comment parle la justice qui vient de la foi: Ne dis pas en ton coeur: Qui montera au ciel? c'est en faire descendre Christ; +10.7 +ou: Qui descendra dans l'abîme? c'est faire remonter Christ d'entre les morts. +10.8 +Que dit-elle donc? La parole est près de toi, dans ta bouche et dans ton coeur. Or, c'est la parole de la foi, que nous prêchons. +10.9 +Si tu confesses de ta bouche le Seigneur Jésus, et si tu crois dans ton coeur que Dieu l'a ressuscité des morts, tu seras sauvé. +10.10 +Car c'est en croyant du coeur qu'on parvient à la justice, et c'est en confessant de la bouche qu'on parvient au salut, selon ce que dit l'Écriture: +10.11 +Quiconque croit en lui ne sera point confus. +10.12 +Il n'y a aucune différence, en effet, entre le Juif et le Grec, puisqu'ils ont tous un même Seigneur, qui est riche pour tous ceux qui l'invoquent. +10.13 +Car quiconque invoquera le nom du Seigneur sera sauvé. +10.14 +Comment donc invoqueront-ils celui en qui ils n'ont pas cru? Et comment croiront-ils en celui dont ils n'ont pas entendu parler? 
Et comment en entendront-ils parler, s'il n'y a personne qui prêche? +10.15 +Et comment y aura-t-il des prédicateurs, s'ils ne sont pas envoyés? selon qu'il est écrit: Qu'ils sont beaux Les pieds de ceux qui annoncent la paix, De ceux qui annoncent de bonnes nouvelles! +10.16 +Mais tous n'ont pas obéi à la bonne nouvelle. Aussi Ésaïe dit-il: Seigneur, Qui a cru à notre prédication? +10.17 +Ainsi la foi vient de ce qu'on entend, et ce qu'on entend vient de la parole de Christ. +10.18 +Mais je dis: N'ont-ils pas entendu? Au contraire! Leur voix est allée par toute la terre, Et leurs paroles jusqu'aux extrémités du monde. +10.19 +Mais je dis: Israël ne l'a-t-il pas su? Moïse le premier dit: J'exciterai votre jalousie par ce qui n'est point une nation, je provoquerai votre colère par une nation sans intelligence. +10.20 +Et Ésaïe pousse la hardiesse jusqu'à dire: J'ai été trouvé par ceux qui ne me cherchaient pas, Je me suis manifesté à ceux qui ne me demandaient pas. +10.21 +Mais au sujet d'Israël, il dit: J'ai tendu mes mains tout le jour vers un peuple rebelle Et contredisant. + +Romains 11 + +11.1 +Je dis donc: Dieu a-t-il rejeté son peuple? Loin de là! Car moi aussi je suis Israélite, de la postérité d'Abraham, de la tribu de Benjamin. +11.2 +Dieu n'a point rejeté son peuple, qu'il a connu d'avance. Ne savez-vous pas ce que l'Écriture rapporte d'Élie, comment il adresse à Dieu cette plainte contre Israël: +11.3 +Seigneur, ils ont tué tes prophètes, ils ont renversé tes autels; je suis resté moi seul, et ils cherchent à m'ôter la vie? +11.4 +Mais quelle réponse Dieu lui fait-il? Je me suis réservé sept mille hommes, qui n'ont point fléchi le genou devant Baal. +11.5 +De même aussi dans le temps présent il y un reste, selon l'élection de la grâce. +11.6 +Or, si c'est par grâce, ce n'est plus par les oeuvres; autrement la grâce n'est plus une grâce. Et si c'est par les oeuvres, ce n'est plus une grâce; autrement l'oeuvre n'est plus une oeuvre. +11.7 +Quoi donc? Ce qu'Israël cherche, il ne l'a pas obtenu, mais l'élection l'a obtenu, tandis que les autres ont été endurcis, +11.8 +selon qu'il est écrit: Dieu leur a donné un esprit d'assoupissement, Des yeux pour ne point voir, Et des oreilles pour ne point entendre, Jusqu'à ce jour. Et David dit: +11.9 +Que leur table soit pour eux un piège, Un filet, une occasion de chute, et une rétribution! +11.10 +Que leurs yeux soient obscurcis pour ne point voir, Et tiens leur dos continuellement courbé! +11.11 +Je dis donc: Est-ce pour tomber qu'ils ont bronché? Loin de là! Mais, par leur chute, le salut est devenu accessible aux païens, afin qu'ils fussent excités à la jalousie. +11.12 +Or, si leur chute a été la richesse du monde, et leur amoindrissement la richesse des païens, combien plus en sera-t-il ainsi quand ils se convertiront tous. +11.13 +Je vous le dis à vous, païens: en tant que je suis apôtre des païens, je glorifie mon ministère, +11.14 +afin, s'il est possible, d'exciter la jalousie de ceux de ma race, et d'en sauver quelques-uns. +11.15 +Car si leur rejet a été la réconciliation du monde, que sera leur réintégration, sinon une vie d'entre les morts? +11.16 +Or, si les prémices sont saintes, la masse l'est aussi; et si la racine est sainte, les branches le sont aussi. +11.17 +Mais si quelques-unes des branches ont été retranchées, et si toi, qui était un olivier sauvage, tu as été enté à leur place, et rendu participant de la racine et de la graisse de l'olivier, +11.18 +ne te glorifie pas aux dépens de ces branches. 
Si tu te glorifies, sache que ce n'est pas toi qui portes la racine, mais que c'est la racine qui te porte. +11.19 +Tu diras donc: Les branches ont été retranchées, afin que moi je fusse enté. +11.20 +Cela est vrai; elles ont été retranchées pour cause d'incrédulité, et toi, tu subsistes par la foi. Ne t'abandonne pas à l'orgueil, mais crains; +11.21 +car si Dieu n'a pas épargné les branches naturelles, il ne t'épargnera pas non plus. +11.22 +Considère donc la bonté et la sévérité de Dieu: sévérité envers ceux qui sont tombés, et bonté de Dieu envers toi, si tu demeures ferme dans cette bonté; autrement, tu seras aussi retranché. +11.23 +Eux de même, s'ils ne persistent pas dans l'incrédulité, ils seront entés; car Dieu est puissant pour les enter de nouveau. +11.24 +Si toi, tu as été coupé de l'olivier naturellement sauvage, et enté contrairement à ta nature sur l'olivier franc, à plus forte raison eux seront-ils entés selon leur nature sur leur propre olivier. +11.25 +Car je ne veux pas, frères, que vous ignoriez ce mystère, afin que vous ne vous regardiez point comme sages, c'est qu'une partie d'Israël est tombée dans l'endurcissement, jusqu'à ce que la totalité des païens soit entrée. +11.26 +Et ainsi tout Israël sera sauvé, selon qu'il est écrit: Le libérateur viendra de Sion, Et il détournera de Jacob les impiétés; +11.27 +Et ce sera mon alliance avec eux, Lorsque j'ôterai leurs péchés. +11.28 +En ce qui concerne l'Évangile, ils sont ennemis à cause de vous; mais en ce qui concerne l'élection, ils sont aimés à cause de leurs pères. +11.29 +Car Dieu ne se repent pas de ses dons et de son appel. +11.30 +De même que vous avez autrefois désobéi à Dieu et que par leur désobéissance vous avez maintenant obtenu miséricorde, +11.31 +de même ils ont maintenant désobéi, afin que, par la miséricorde qui vous a été faite, ils obtiennent aussi miséricorde. +11.32 +Car Dieu a renfermé tous les hommes dans la désobéissance, pour faire miséricorde à tous. +11.33 +O profondeur de la richesse, de la sagesse et de la science de Dieu! Que ses jugements sont insondables, et ses voies incompréhensibles! Car +11.34 +Qui a connu la pensée du Seigneur, Ou qui a été son conseiller? +11.35 +Qui lui a donné le premier, pour qu'il ait à recevoir en retour? +11.36 +C'est de lui, par lui, et pour lui que sont toutes choses. A lui la gloire dans tous les siècles! Amen! + +Romains 12 + +12.1 +Je vous exhorte donc, frères, par les compassions de Dieu, à offrir vos corps comme un sacrifice vivant, saint, agréable à Dieu, ce qui sera de votre part un culte raisonnable. +12.2 +Ne vous conformez pas au siècle présent, mais soyez transformés par le renouvellement de l'intelligence, afin que vous discerniez quelle est la volonté de Dieu, ce qui est bon, agréable et parfait. +12.3 +Par la grâce qui m'a été donnée, je dis à chacun de vous de n'avoir pas de lui-même une trop haute opinion, mais de revêtir des sentiments modestes, selon la mesure de foi que Dieu a départie à chacun. +12.4 +Car, comme nous avons plusieurs membres dans un seul corps, et que tous les membres n'ont pas la même fonction, +12.5 +ainsi, nous qui sommes plusieurs, nous formons un seul corps en Christ, et nous sommes tous membres les uns des autres. 
+12.6 +Puisque nous avons des dons différents, selon la grâce qui nous a été accordée, que celui qui a le don de prophétie l'exerce selon l'analogie de la foi; +12.7 +que celui qui est appelé au ministère s'attache à son ministère; que celui qui enseigne s'attache à son enseignement, +12.8 +et celui qui exhorte à l'exhortation. Que celui qui donne le fasse avec libéralité; que celui qui préside le fasse avec zèle; que celui qui pratique la miséricorde le fasse avec joie. +12.9 +Que la charité soit sans hypocrisie. Ayez le mal en horreur; attachez-vous fortement au bien. +12.10 +Par amour fraternel, soyez pleins d'affection les uns pour les autres; par honneur, usez de prévenances réciproques. +12.11 +Ayez du zèle, et non de la paresse. Soyez fervents d'esprit. Servez le Seigneur. +12.12 +Réjouissez-vous en espérance. Soyez patients dans l'affliction. Persévérez dans la prière. +12.13 +Pourvoyez aux besoins des saints. Exercez l'hospitalité. +12.14 +Bénissez ceux qui vous persécutent, bénissez et ne maudissez pas. +12.15 +Réjouissez-vous avec ceux qui se réjouissent; pleurez avec ceux qui pleurent. +12.16 +Ayez les mêmes sentiments les uns envers les autres. N'aspirez pas à ce qui est élevé, mais laissez-vous attirer par ce qui est humble. Ne soyez point sages à vos propres yeux. +12.17 +Ne rendez à personne le mal pour le mal. Recherchez ce qui est bien devant tous les hommes. +12.18 +S'il est possible, autant que cela dépend de vous, soyez en paix avec tous les hommes. +12.19 +Ne vous vengez point vous-mêmes, bien-aimés, mais laissez agir la colère; car il est écrit: A moi la vengeance, à moi la rétribution, dit le Seigneur. +12.20 +Mais si ton ennemi a faim, donne-lui à manger; s'il a soif, donne-lui à boire; car en agissant ainsi, ce sont des charbons ardents que tu amasseras sur sa tête. +12.21 +Ne te laisse pas vaincre par le mal, mais surmonte le mal par le bien. + +Romains 13 + +13.1 +Que toute personne soit soumise aux autorités supérieures; car il n'y a point d'autorité qui ne vienne de Dieu, et les autorités qui existent ont été instituées de Dieu. +13.2 +C'est pourquoi celui qui s'oppose à l'autorité résiste à l'ordre que Dieu a établi, et ceux qui résistent attireront une condamnation sur eux-mêmes. +13.3 +Ce n'est pas pour une bonne action, c'est pour une mauvaise, que les magistrats sont à redouter. Veux-tu ne pas craindre l'autorité? Fais-le bien, et tu auras son approbation. +13.4 +Le magistrat est serviteur de Dieu pour ton bien. Mais si tu fais le mal, crains; car ce n'est pas en vain qu'il porte l'épée, étant serviteur de Dieu pour exercer la vengeance et punir celui qui fait le mal. +13.5 +Il est donc nécessaire d'être soumis, non seulement par crainte de la punition, mais encore par motif de conscience. +13.6 +C'est aussi pour cela que vous payez les impôts. Car les magistrats sont des ministres de Dieu entièrement appliqués à cette fonction. +13.7 +Rendez à tous ce qui leur est dû: l'impôt à qui vous devez l'impôt, le tribut à qui vous devez le tribut, la crainte à qui vous devez la crainte, l'honneur à qui vous devez l'honneur. +13.8 +Ne devez rien à personne, si ce n'est de vous aimer les uns les autres; car celui qui aime les autres a accompli la loi. +13.9 +En effet, les commandements: Tu ne commettras point d'adultère, tu ne tueras point, tu ne déroberas point, tu ne convoiteras point, et ceux qu'il peut encore y avoir, se résument dans cette parole: Tu aimeras ton prochain comme toi-même. 
+13.10 +L'amour ne fait point de mal au prochain: l'amour est donc l'accomplissement de la loi. +13.11 +Cela importe d'autant plus que vous savez en quel temps nous sommes: c'est l'heure de vous réveiller enfin du sommeil, car maintenant le salut est plus près de nous que lorsque nous avons cru. +13.12 +La nuit est avancée, le jour approche. Dépouillons-nous donc des oeuvres des ténèbres, et revêtons les armes de la lumière. +13.13 +Marchons honnêtement, comme en plein jour, loin des excès et de l'ivrognerie, de la luxure et de l'impudicité, des querelles et des jalousies. +13.14 +Mais revêtez-vous du Seigneur Jésus Christ, et n'ayez pas soin de la chair pour en satisfaire les convoitises. + +Romains 14 + +14.1 +Faites accueil à celui qui est faible dans la foi, et ne discutez pas sur les opinions. +14.2 +Tel croit pouvoir manger de tout: tel autre, qui est faible, ne mange que des légumes. +14.3 +Que celui qui mange ne méprise point celui qui ne mange pas, et que celui qui ne mange pas ne juge point celui qui mange, car Dieu l'a accueilli. +14.4 +Qui es-tu, toi qui juges un serviteur d'autrui? S'il se tient debout, ou s'il tombe, cela regarde son maître. Mais il se tiendra debout, car le Seigneur a le pouvoir de l'affermir. +14.5 +Tel fait une distinction entre les jours; tel autre les estime tous égaux. Que chacun ait en son esprit une pleine conviction. +14.6 +Celui qui distingue entre les jours agit ainsi pour le Seigneur. Celui qui mange, c'est pour le Seigneur qu'il mange, car il rend grâces à Dieu; celui qui ne mange pas, c'est pour le Seigneur qu'il ne mange pas, et il rend grâces à Dieu. +14.7 +En effet, nul de nous ne vit pour lui-même, et nul ne meurt pour lui-même. +14.8 +Car si nous vivons, nous vivons pour le Seigneur; et si nous mourons, nous mourons pour le Seigneur. Soit donc que nous vivions, soit que nous mourions, nous sommes au Seigneur. +14.9 +Car Christ est mort et il a vécu, afin de dominer sur les morts et sur les vivants. +14.10 +Mais toi, pourquoi juges-tu ton frère? ou toi, pourquoi méprises-tu ton frère? puisque nous comparaîtrons tous devant le tribunal de Dieu. +14.11 +Car il est écrit: Je suis vivant, dit le Seigneur, Tout genou fléchira devant moi, Et toute langue donnera gloire à Dieu. +14.12 +Ainsi chacun de nous rendra compte à Dieu pour lui-même. +14.13 +Ne nous jugeons donc plus les uns les autres; mais pensez plutôt à ne rien faire qui soit pour votre frère une pierre d'achoppement ou une occasion de chute. +14.14 +Je sais et je suis persuadé par le Seigneur Jésus que rien n'est impur en soi, et qu'une chose n'est impure que pour celui qui la croit impure. +14.15 +Mais si, pour un aliment, ton frère est attristé, tu ne marches plus selon l'amour: ne cause pas, par ton aliment, la perte de celui pour lequel Christ est mort. +14.16 +Que votre privilège ne soit pas un sujet de calomnie. +14.17 +Car le royaume de Dieu, ce n'est pas le manger et le boire, mais la justice, la paix et la joie, par le Saint Esprit. +14.18 +Celui qui sert Christ de cette manière est agréable à Dieu et approuvé des hommes. +14.19 +Ainsi donc, recherchons ce qui contribue à la paix et à l'édification mutuelle. +14.20 +Pour un aliment, ne détruis pas l'oeuvre de Dieu. A la vérité toutes choses sont pures; mais il est mal à l'homme, quand il mange, de devenir une pierre d'achoppement. +14.21 +Il est bien de ne pas manger de viande, de ne pas boire de vin, et de s'abstenir de ce qui peut être pour ton frère une occasion de chute, de scandale ou de faiblesse. 
+14.22 +Cette foi que tu as, garde-la pour toi devant Dieu. Heureux celui qui ne se condamne pas lui-même dans ce qu'il approuve! +14.23 +Mais celui qui a des doutes au sujet de ce qu'il mange est condamné, parce qu'il n'agit pas par conviction. Tout ce qui n'est pas le produit d'une conviction est péché. + +Romains 15 + +15.1 +Nous qui sommes forts, nous devons supporter les faiblesses de ceux qui ne le sont pas, et ne pas nous complaire en nous-mêmes. +15.2 +Que chacun de nous complaise au prochain pour ce qui est bien en vue de l'édification. +15.3 +Car Christ ne s'est point complu en lui-même, mais, selon qu'il est écrit: Les outrages de ceux qui t'insultent sont tombés sur moi. +15.4 +Or, tout ce qui a été écrit d'avance l'a été pour notre instruction, afin que, par la patience, et par la consolation que donnent les Écritures, nous possédions l'espérance. +15.5 +Que le Dieu de la persévérance et de la consolation vous donne d'avoir les mêmes sentiments les uns envers les autres selon Jésus Christ, +15.6 +afin que tous ensemble, d'une seule bouche, vous glorifiiez le Dieu et Père de notre Seigneur Jésus Christ. +15.7 +Accueillez-vous donc les uns les autres, comme Christ vous a accueillis, pour la gloire de Dieu. +15.8 +Je dis, en effet, que Christ a été serviteur des circoncis, pour prouver la véracité de Dieu en confirmant les promesses faites aux pères, +15.9 +tandis que les païens glorifient Dieu à cause de sa miséricorde, selon qu'il est écrit: C'est pourquoi je te louerai parmi les nations, Et je chanterai à la gloire de ton nom. Il est dit encore: +15.10 +Nations, réjouissez-vous avec son peuple! +15.11 +Et encore: Louez le Seigneur, vous toutes les nations, Célébrez-le, vous tous les peuples! +15.12 +Ésaïe dit aussi: Il sortira d'Isaï un rejeton, Qui se lèvera pour régner sur les nations; Les nations espéreront en lui. +15.13 +Que le Dieu de l'espérance vous remplisse de toute joie et de toute paix dans la foi, pour que vous abondiez en espérance, par la puissance du Saint Esprit! +15.14 +Pour ce qui vous concerne, mes frères, je suis moi-même persuadé que vous êtes pleins de bonnes dispositions, remplis de toute connaissance, et capables de vous exhorter les uns les autres. +15.15 +Cependant, à certains égards, je vous ai écrit avec une sorte de hardiesse, comme pour réveiller vos souvenirs, à cause de la grâce que Dieu m'a faite +15.16 +d'être ministre de Jésus Christ parmi les païens, m'acquittant du divin service de l'Évangile de Dieu, afin que les païens lui soient une offrande agréable, étant sanctifiée par l'Esprit Saint. +15.17 +J'ai donc sujet de me glorifier en Jésus Christ, pour ce qui regarde les choses de Dieu. +15.18 +Car je n'oserais mentionner aucune chose que Christ n'ait pas faite par moi pour amener les païens à l'obéissance, par la parole et par les actes, +15.19 +par la puissance des miracles et des prodiges, par la puissance de l'Esprit de Dieu, en sorte que, depuis Jérusalem et les pays voisins jusqu'en Illyrie, j'ai abondamment répandu l'Évangile de Christ. +15.20 +Et je me suis fait honneur d'annoncer l'Évangile là où Christ n'avait point été nommé, afin de ne pas bâtir sur le fondement d'autrui, selon qu'il est écrit: +15.21 +Ceux à qui il n'avait point été annoncé verront, Et ceux qui n'en avaient point entendu parler comprendront. +15.22 +C'est ce qui m'a souvent empêché d'aller vers vous. 
+15.23 +Mais maintenant, n'ayant plus rien qui me retienne dans ces contrées, et ayant depuis plusieurs années le désir d'aller vers vous, +15.24 +j'espère vous voir en passant, quand je me rendrai en Espagne, et y être accompagné par vous, après que j'aurai satisfait en partie mon désir de me trouver chez vous. +15.25 +Présentement je vais à Jérusalem, pour le service des saints. +15.26 +Car la Macédoine et l'Achaïe ont bien voulu s'imposer une contribution en faveur des pauvres parmi les saints de Jérusalem. +15.27 +Elles l'ont bien voulu, et elles le leur devaient; car si les païens ont eu part à leurs avantages spirituels, ils doivent aussi les assister dans les choses temporelles. +15.28 +Dès que j'aurai terminé cette affaire et que je leur aurai remis ces dons, je partirai pour l'Espagne et passerai chez vous. +15.29 +Je sais qu'en allant vers vous, c'est avec une pleine bénédiction de Christ que j'irai. +15.30 +Je vous exhorte, frères, par notre Seigneur Jésus Christ et par l'amour de l'Esprit, à combattre avec moi, en adressant à Dieu des prières en ma faveur, +15.31 +afin que je sois délivré des incrédules de la Judée, et que les dons que je porte à Jérusalem soient agréés des saints, +15.32 +en sorte que j'arrive chez vous avec joie, si c'est la volonté de Dieu, et que je jouisse au milieu de vous de quelque repos. +15.33 +Que le Dieu de paix soit avec vous tous! Amen! + +Romains 16 + +16.1 +Je vous recommande Phoebé, notre soeur, qui est diaconesse de l'Église de Cenchrées, +16.2 +afin que vous la receviez en notre Seigneur d'une manière digne des saints, et que vous l'assistiez dans les choses où elle aurait besoin de vous, car elle en a donné aide à plusieurs et à moi-même. +16.3 +Saluez Prisca et Aquilas, mes compagnons d'oeuvre en Jésus Christ, +16.4 +qui ont exposé leur tête pour sauver ma vie; ce n'est pas moi seul qui leur rends grâces, ce sont encore toutes les Églises des païens. +16.5 +Saluez aussi l'Église qui est dans leur maison. Saluez Épaïnète, mon bien-aimé, qui a été pour Christ les prémices de l'Asie. +16.6 +Saluez Marie, qui a pris beaucoup de peine pour vous. +16.7 +Saluez Andronicus et Junias, mes parents et mes compagnons de captivité, qui jouissent d'une grande considération parmi les apôtres, et qui même ont été en Christ avant moi. +16.8 +Saluez Amplias, mon bien-aimé dans le Seigneur. +16.9 +Saluez Urbain, notre compagnon d'oeuvre en Christ, et Stachys, mon bien-aimé. +16.10 +Saluez Apellès, qui est éprouvé en Christ. Saluez ceux de la maison d'Aristobule. +16.11 +Saluez Hérodion, mon parent. Saluez ceux de la maison de Narcisse qui sont dans le Seigneur. +16.12 +Saluez Tryphène et Tryphose, qui travaillent pour le Seigneur. Saluez Perside, la bien-aimée, qui a beaucoup travaillé pour le Seigneur. +16.13 +Saluez Rufus, l'élu du Seigneur, et sa mère, qui est aussi la mienne. +16.14 +Saluez Asyncrite, Phlégon, Hermès, Patrobas, Hermas, et les frères qui sont avec eux. +16.15 +Saluez Philologue et Julie, Nérée et sa soeur, et Olympe, et tous les saints qui sont avec eux. +16.16 +Saluez-vous les uns les autres par un saint baiser. Toutes les Églises de Christ vous saluent. +16.17 +Je vous exhorte, frères, à prendre garde à ceux qui causent des divisions et des scandales, au préjudice de l'enseignement que vous avez reçu. Éloignez-vous d'eux. +16.18 +Car de tels hommes ne servent point Christ notre Seigneur, mais leur propre ventre; et, par des paroles douces et flatteuses, ils séduisent les coeurs des simples. 
+16.19
+Pour vous, votre obéissance est connue de tous; je me réjouis donc à votre sujet, et je désire que vous soyez sages en ce qui concerne le bien et purs en ce qui concerne le mal.
+16.20
+Le Dieu de paix écrasera bientôt Satan sous vos pieds. Que la grâce de notre Seigneur Jésus Christ soit avec vous!
+16.21
+Timothée, mon compagnon d'oeuvre, vous salue, ainsi que Lucius, Jason et Sosipater, mes parents.
+16.22
+Je vous salue dans le Seigneur, moi Tertius, qui ai écrit cette lettre.
+16.23
+Gaïus, mon hôte et celui de toute l'Église, vous salue. Éraste, le trésorier de la ville, vous salue, ainsi que le frère Quartus.
+16.24
+Que la grâce de notre Seigneur Jésus Christ soit avec vous tous! Amen!
+16.25
+A celui qui peut vous affermir selon mon Évangile et la prédication de Jésus Christ, conformément à la révélation du mystère caché pendant des siècles,
+16.26
+mais manifesté maintenant par les écrits des prophètes, d'après l'ordre du Dieu éternel, et porté à la connaissance de toutes les nations, afin qu'elles obéissent à la foi,
+16.27
+à Dieu, seul sage, soit la gloire aux siècles des siècles, par Jésus Christ! Amen!
diff --git a/test/ws_perf_SUITE_data/japanese.txt b/test/ws_perf_SUITE_data/japanese.txt
new file mode 100644
index 0000000..780764f
--- /dev/null
+++ b/test/ws_perf_SUITE_data/japanese.txt
@@ -0,0 +1,41 @@
+JAP
+
+1
+
+天と地の創造
+
+1まだ何もなかった時、神は天と地を造りました。 2地は形も定まらず、闇に包まれた水の上を、さらに神の霊が覆っていました。
+
+3「光よ、輝き出よ。」神が言われると、光がさっとさしてきました。 4-5それを見て、神は大いに満足し、光と闇とを区別しました。しばらくの間、光は輝き続け、やがて、もう一度闇に覆われました。神は光を「昼」、闇を「夜」と名づけました。こうして昼と夜ができて、一日目が終わりました。
+
+
+6「もやは上下に分かれ、空と海になれ」と神が言われると、 7-8そのとおり水蒸気が二つに分かれ、空ができました。こうして二日目も終わりました。
+
+9-10「空の下の水は集まって海となり、乾いた地が現れ出よ。」こう神が言われると、そのとおりになりました。神は乾いた地を「陸地」、水の部分を「海」と名づけました。それを見て満足すると、 11-12神はまた言われました。「陸地には、あらゆる種類の草、種のある植物、実のなる木が生えよ。それぞれの種から同じ種類の草や木が生えるようになれ。」すると、そのとおりになり、神は満足しました。 13これが三日目です。
+
+14-15神のことばはさらに続きます。「空に光が輝き、地を照らせ。その光で、昼と夜の区別、季節の変化、一日や一年の区切りをつけよ。」すると、そのとおりになりました。 16こうして、地を照らす太陽と月ができました。太陽は大きく明るいので昼を、月は夜を治めました。このほかにも、星々が造られました。 17神はそれをみな空にちりばめ、地を照らすようにしました。 18こうして昼と夜を分け終えると、神は満足しました。 19ここまでが四日目の出来事です。
+
+20神は再び言われました。「海は魚やその他の生き物であふれ、空はあらゆる種類の鳥で満ちよ。」 21-22神は海に住む大きな生き物をはじめ、あらゆる種類の魚と鳥を造りました。みなすばらしいものばかりです。神はそれを見て、「海いっぱいに満ちよ。鳥たちは地を覆うまでに増えよ」と祝福しました。 23これが五日目です。
+
+24次に神は言われました。「地は、家畜や地をはうもの、野の獣など、あらゆる種類の生き物を生み出せ。」そのとおりになりました。 25神が造った生き物は、どれも満足のいくものばかりでした。
+
+26そして最後に、神はこう言われました。「さあ、人間を造ろう。地と空と海のあらゆる生き物を治めさせるために、われわれに最も近い、われわれのかたちに似せて人間を造ろう。」 27このように人間は、天地を造った神の特性を持つ者として、男と女とに創造されました。
+
+28神は人間を祝福して言われました。「地に増え広がり、大地を治めよ。あなたがたは、魚と鳥とすべての動物の主人なのだ。 29全地に生える種のある植物を見てみなさい。みなあなたがたのものだ。実のなる木もすべて与えるから、好きなだけ食べるがいい。 30また、動物や鳥にも、あらゆる草と植物を彼らの食物として与える。」 31神はでき上がった世界を隅から隅まで見渡しました。とてもすばらしい世界が広がっていました。こうして六日目が終わりました。
+
+
+2
+
+1ついに全世界が完成しました。 2すべてを創造し終えると、神は七日目には休まれ、 3この日を祝福して、聖なる日と定めました。この日、天地創造の働きが完了したからです。
+
+人間の創造
+
+人間の創造
+
+人間の創造
+
+人間の創造
+
+人間の創造
+
+人間の創造.