-rw-r--r--   Makefile                |   17
-rw-r--r--   build.config            |   30
-rw-r--r--   erlang.mk               | 1716
-rw-r--r--   src/cowboy.erl          |   16
-rw-r--r--   src/cowboy_http2.erl    |  452
-rw-r--r--   src/cowboy_stream.erl   |   53
-rw-r--r--   src/cowboy_tls.erl      |   44
7 files changed, 1913 insertions(+), 415 deletions(-)
diff --git a/Makefile b/Makefile
index 10629d3..ca3c1be 100644
--- a/Makefile
+++ b/Makefile
@@ -4,14 +4,15 @@ PROJECT = cowboy
# Options.
-COMPILE_FIRST = cowboy_middleware cowboy_sub_protocol
-CT_OPTS += -pa test -ct_hooks cowboy_ct_hook [] # -boot start_sasl
+COMPILE_FIRST = cowboy_middleware cowboy_stream cowboy_sub_protocol
+CT_OPTS += -ct_hooks cowboy_ct_hook [] # -boot start_sasl
PLT_APPS = crypto public_key ssl
+CI_OTP = OTP-18.0-rc2 # OTP-17.1.2 OTP-17.2.2 OTP-17.3.4 OTP-17.4.1 OTP-17.5.3
# Dependencies.
DEPS = cowlib ranch
-dep_cowlib = git https://github.com/ninenines/cowlib 1.3.0
+dep_cowlib = git https://github.com/ninenines/cowlib master
TEST_DEPS = ct_helper gun
dep_ct_helper = git https://github.com/extend/ct_helper.git master
@@ -27,6 +28,16 @@ TEST_ERLC_OPTS += +'{parse_transform, eunit_autoexport}'
# DIALYZER_OPTS += --src -r test
+# Open logs after CI ends.
+
+ci::
+ $(gen_verbose) xdg-open logs/all_runs.html
+
+# Use erl_make_certs from the tested release.
+
+ci-setup::
+ $(gen_verbose) cp ~/.kerl/builds/$(CI_OTP_RELEASE)/otp_src_git/lib/ssl/test/erl_make_certs.erl deps/ct_helper/src/
+
# Documentation.
dep_ezdoc = git https://github.com/ninenines/ezdoc master
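Editorial note: the CI_OTP variable and the ci::/ci-setup:: hooks added above are consumed by the CI plugin introduced further down in the erlang.mk part of this diff. A minimal usage sketch, assuming kerl can build the listed OTP releases on the build machine (releases shown here are illustrative):

# Sketch only, not part of this commit: list the OTP releases to test.
CI_OTP = OTP-17.5.3 OTP-18.0-rc2
# `make ci` then builds each release with kerl into $(CI_INSTALL_DIR)/<release>
# and runs `make clean ci-setup tests` with that release's bin/ first in PATH.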
diff --git a/build.config b/build.config
index cae6cf3..6cdfbb4 100644
--- a/build.config
+++ b/build.config
@@ -1,20 +1,36 @@
-# Core modules.
+# Do *not* comment or remove core modules
+# unless you know what you are doing.
#
-# Do *not* comment or remove them
-# unless you know what you are doing!
+# Feel free to comment out plugins, however.
+
+# Core modules.
core/core
core/deps
+
+# Plugins that must run before Erlang code gets compiled.
+plugins/protobuffs
+
+# Core modules, continued.
core/erlc
+core/docs
+core/test
# Plugins.
-#
-# Comment to disable, uncomment to enable.
+plugins/asciidoc
plugins/bootstrap
-#plugins/c_src
+plugins/c_src
+plugins/ci
plugins/ct
plugins/dialyzer
#plugins/edoc
-#plugins/elvis
+plugins/elvis
plugins/erlydtl
+plugins/escript
+plugins/eunit
plugins/relx
plugins/shell
+plugins/triq
+plugins/xref
+
+# Plugins enhancing the functionality of other plugins.
+plugins/cover
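Editorial note: as the comments above state, the core/* lines must stay, while any plugins/* line can be toggled by commenting it out before rebuilding erlang.mk with `make erlang-mk` (which, when present, copies this file as ERLANG_MK_BUILD_CONFIG into the build directory). For example, to drop the elvis plugin again one would revert that line to its commented form:

#plugins/elvis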
diff --git a/erlang.mk b/erlang.mk
index e6833bc..1e561aa 100644
--- a/erlang.mk
+++ b/erlang.mk
@@ -12,9 +12,9 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-.PHONY: all deps app rel docs tests clean distclean help erlang-mk
+.PHONY: all deps app rel docs install-docs tests check clean distclean help erlang-mk
-ERLANG_MK_VERSION = 1
+ERLANG_MK_VERSION = 1.2.0-564-g178e214-dirty
# Core configuration.
@@ -28,10 +28,42 @@ V ?= 0
gen_verbose_0 = @echo " GEN " $@;
gen_verbose = $(gen_verbose_$(V))
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
# "erl" command.
ERL = erl +A0 -noinput -boot start_clean
+# Platform detection.
+# @todo Add Windows/Cygwin detection eventually.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
# Core targets.
ifneq ($(words $(MAKECMDGOALS)),1)
@@ -46,6 +78,8 @@ all:: deps
rel::
@echo -n
+check:: clean app tests
+
clean:: clean-crashdump
clean-crashdump:
@@ -60,34 +94,61 @@ help::
"erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
"Copyright (c) 2013-2014 Loïc Hoguin <[email protected]>" \
"" \
- "Usage: [V=1] make [-jNUM] [target]" \
+ "Usage: [V=1] $(MAKE) [-jNUM] [target]" \
"" \
"Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " deps Fetch dependencies (if needed) and compile them" \
- " app Compile the project" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
+ " all Run deps, app and rel targets in that order" \
+ " deps Fetch dependencies (if needed) and compile them" \
+ " app Compile the project" \
+ " rel Build a release for this project, if applicable" \
+ " docs Build the documentation for this project" \
+ " install-docs Install the man pages for this project" \
+ " tests Run the tests for this project" \
+ " check Compile and run all tests and analysis for this project" \
+ " clean Delete temporary and output files from most targets" \
+ " distclean Delete all temporary and output files" \
+ " help Display this help and exit" \
"" \
"The target clean only removes files that are commonly removed." \
"Dependencies and releases are left untouched." \
"" \
- "Setting V=1 when calling make enables verbose mode." \
- "Parallel execution is supported through the -j Make flag."
+ "Setting V=1 when calling $(MAKE) enables verbose mode." \
+ "Parallel execution is supported through the -j $(MAKE) flag."
# Core functions.
+define newline
+
+
+endef
+
+# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
+define erlang
+$(ERL) -pa $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
define core_http_get
wget --no-check-certificate -O $(1) $(2)|| rm $(1)
endef
else
+define core_http_get.erl
+ ssl:start(),
+ inets:start(),
+ case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+ {ok, {{_, 200, _}, _, Body}} ->
+ case file:write_file("$(1)", Body) of
+ ok -> ok;
+ {error, R1} -> halt(R1)
+ end;
+ {error, R2} ->
+ halt(R2)
+ end,
+ halt(0).
+endef
+
define core_http_get
- $(ERL) -eval 'ssl:start(), inets:start(), case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of {ok, {{_, 200, _}, _, Body}} -> case file:write_file("$(1)", Body) of ok -> ok; {error, R1} -> halt(R1) end; {error, R2} -> halt(R2) end, halt(0).'
+ $(call erlang,$(call core_http_get.erl,$(1),$(2)))
endef
endif
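Editorial note: the `erlang` macro introduced above is the pattern used by the rest of this diff: multi-line Erlang snippets live in define blocks, and the macro strips the newlines, escapes the double quotes and feeds the result to `erl -eval`. A minimal sketch of the same pattern, with a hypothetical hello.erl snippet and hello target (not part of this commit):

# Sketch only: a throwaway snippet run through the new erlang macro.
define hello.erl
	io:format("hello from erlang.mk on ~s~n", [$(PLATFORM)]),
	halt(0).
endef

hello:
	@$(call erlang,$(hello.erl))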
@@ -99,7 +160,7 @@ ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
erlang-mk:
git clone https://github.com/ninenines/erlang.mk $(ERLANG_MK_BUILD_DIR)
if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR); fi
- cd $(ERLANG_MK_BUILD_DIR) && make
+ cd $(ERLANG_MK_BUILD_DIR) && $(MAKE)
cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
rm -rf $(ERLANG_MK_BUILD_DIR)
@@ -110,8 +171,7 @@ erlang-mk:
# Configuration.
-AUTOPATCH ?= edown gen_leader gproc
-export AUTOPATCH
+IGNORE_DEPS ?=
DEPS_DIR ?= $(CURDIR)/deps
export DEPS_DIR
@@ -119,7 +179,7 @@ export DEPS_DIR
REBAR_DEPS_DIR = $(DEPS_DIR)
export REBAR_DEPS_DIR
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DEPS))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(filter-out $(IGNORE_DEPS),$(DEPS)))
ifeq ($(filter $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
ifeq ($(ERL_LIBS),)
@@ -135,65 +195,452 @@ export PKG_FILE2
PKG_FILE_URL ?= https://raw.githubusercontent.com/ninenines/erlang.mk/master/packages.v2.tsv
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP " $(1);
+dep_verbose = $(dep_verbose_$(V))
+
# Core targets.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
deps:: $(ALL_DEPS_DIRS)
@for dep in $(ALL_DEPS_DIRS) ; do \
if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ] ; then \
- $(MAKE) -C $$dep ; \
+ $(MAKE) -C $$dep IS_DEP=1 || exit $$? ; \
else \
- echo "include $(CURDIR)/erlang.mk" | ERLC_OPTS=+debug_info $(MAKE) -f - -C $$dep ; \
+ echo "ERROR: No Makefile to build dependency $$dep." ; \
+ exit 1 ; \
fi ; \
done
+endif
distclean:: distclean-deps distclean-pkg
# Deps related targets.
+# @todo Rename GNUmakefile and makefile to Makefile first, if they exist.
+# While the Makefile could also be named GNUmakefile or makefile,
+# in practice only Makefile has been needed so far.
define dep_autopatch
- $(ERL) -eval " \
-DepDir = \"$(DEPS_DIR)/$(1)/\", \
-fun() -> \
- {ok, Conf} = file:consult(DepDir ++ \"rebar.config\"), \
- File = case lists:keyfind(deps, 1, Conf) of false -> []; {_, Deps} -> \
- [begin {Method, Repo, Commit} = case Repos of \
- {git, R} -> {git, R, master}; \
- {M, R, {branch, C}} -> {M, R, C}; \
- {M, R, {tag, C}} -> {M, R, C}; \
- {M, R, C} -> {M, R, C} \
- end, \
- io_lib:format(\"DEPS += ~s\ndep_~s = ~s ~s ~s~n\", [Name, Name, Method, Repo, Commit]) \
- end || {Name, _, Repos} <- Deps] \
- end, \
- ok = file:write_file(\"$(DEPS_DIR)/$(1)/Makefile\", [\"ERLC_OPTS = +debug_info\n\n\", File, \"\ninclude erlang.mk\"]) \
-end(), \
-AppSrcOut = \"$(DEPS_DIR)/$(1)/src/$(1).app.src\", \
-AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> \"$(DEPS_DIR)/$(1)/ebin/$(1).app\"; true -> AppSrcOut end, \
-fun() -> \
- {ok, [{application, $(1), L}]} = file:consult(AppSrcIn), \
- L2 = case lists:keyfind(modules, 1, L) of {_, _} -> L; false -> [{modules, []}|L] end, \
- L3 = case lists:keyfind(vsn, 1, L2) of {vsn, git} -> lists:keyreplace(vsn, 1, L2, {vsn, \"git\"}); _ -> L2 end, \
- ok = file:write_file(AppSrcOut, io_lib:format(\"~p.~n\", [{application, $(1), L3}])) \
-end(), \
-case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end, \
-halt()."
+ if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+ if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+ $(call dep_autopatch2,$(1)); \
+ elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+ $(call dep_autopatch2,$(1)); \
+ elif [ 0 != `find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk | xargs grep -ci rebar` ]; then \
+ $(call dep_autopatch2,$(1)); \
+ else \
+ if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+ $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+ $(call dep_autopatch_erlang_mk,$(1)); \
+ else \
+ $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+ fi \
+ fi \
+ else \
+ if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+ $(call dep_autopatch_noop,$(1)); \
+ else \
+ $(call dep_autopatch2,$(1)); \
+ fi \
+ fi
endef
-ifeq ($(V),0)
-define dep_autopatch_verbose
- @echo " PATCH " $(1);
+define dep_autopatch2
+ $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+ if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+ $(call dep_autopatch_fetch_rebar); \
+ $(call dep_autopatch_rebar,$(1)); \
+ else \
+ $(call dep_autopatch_gen,$(1)); \
+ fi
+endef
+
+define dep_autopatch_noop
+ printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+ rm -f $(DEPS_DIR)/$(1)/erlang.mk; \
+ cd $(DEPS_DIR)/$(1)/ && ln -s ../../erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+ echo -n
endef
endif
+define dep_autopatch_gen
+ printf "%s\n" \
+ "ERLC_OPTS = +debug_info" \
+ "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+ mkdir -p $(ERLANG_MK_TMP); \
+ if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+ git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+ cd $(ERLANG_MK_TMP)/rebar; \
+ git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+ make; \
+ cd -; \
+ fi
+endef
+
+define dep_autopatch_rebar
+ if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+ mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+ fi; \
+ $(call erlang,$(call dep_autopatch_rebar.erl,$(1)))
+endef
+
+define dep_autopatch_rebar.erl
+ application:set_env(rebar, log_level, debug),
+ Conf1 = case file:consult("$(DEPS_DIR)/$(1)/rebar.config") of
+ {ok, Conf0} -> Conf0;
+ _ -> []
+ end,
+ {Conf, OsEnv} = fun() ->
+ case filelib:is_file("$(DEPS_DIR)/$(1)/rebar.config.script") of
+ false -> {Conf1, []};
+ true ->
+ Bindings0 = erl_eval:new_bindings(),
+ Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+ Bindings = erl_eval:add_binding('SCRIPT', "$(DEPS_DIR)/$(1)/rebar.config.script", Bindings1),
+ Before = os:getenv(),
+ {ok, Conf2} = file:script("$(DEPS_DIR)/$(1)/rebar.config.script", Bindings),
+ {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+ end
+ end(),
+ Write = fun (Text) ->
+ file:write_file("$(DEPS_DIR)/$(1)/Makefile", Text, [append])
+ end,
+ Escape = fun (Text) ->
+ re:replace(Text, "\\\\$$$$", "\$$$$$$$$", [global, {return, list}])
+ end,
+ Write("IGNORE_DEPS = edown eper eunit_formatters meck node_package "
+ "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+ Write("C_SRC_DIR = /path/do/not/exist\n"),
+ Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+ Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+ fun() ->
+ Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+ case lists:keyfind(erl_opts, 1, Conf) of
+ false -> ok;
+ {_, ErlOpts} ->
+ lists:foreach(fun
+ ({d, D}) ->
+ Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+ ({i, I}) ->
+ Write(["ERLC_OPTS += -I ", I, "\n"]);
+ ({platform_define, Regex, D}) ->
+ case rebar_utils:is_arch(Regex) of
+ true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+ false -> ok
+ end;
+ ({parse_transform, PT}) ->
+ Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+ (_) -> ok
+ end, ErlOpts)
+ end,
+ Write("\n")
+ end(),
+ fun() ->
+ File = case lists:keyfind(deps, 1, Conf) of
+ false -> [];
+ {_, Deps} ->
+ [begin case case Dep of
+ {N, S} when is_tuple(S) -> {N, S};
+ {N, _, S} -> {N, S};
+ {N, _, S, _} -> {N, S};
+ _ -> false
+ end of
+ false -> ok;
+ {Name, Source} ->
+ {Method, Repo, Commit} = case Source of
+ {git, R} -> {git, R, master};
+ {M, R, {branch, C}} -> {M, R, C};
+ {M, R, {tag, C}} -> {M, R, C};
+ {M, R, C} -> {M, R, C}
+ end,
+ Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+ end end || Dep <- Deps]
+ end
+ end(),
+ fun() ->
+ case lists:keyfind(erl_first_files, 1, Conf) of
+ false -> ok;
+ {_, Files} ->
+ Names = [[" ", case lists:reverse(F) of
+ "lre." ++ Elif -> lists:reverse(Elif);
+ Elif -> lists:reverse(Elif)
+ end] || "src/" ++ F <- Files],
+ Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+ end
+ end(),
+ FindFirst = fun(F, Fd) ->
+ case io:parse_erl_form(Fd, undefined) of
+ {ok, {attribute, _, compile, {parse_transform, PT}}, _} ->
+ [PT, F(F, Fd)];
+ {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) ->
+ case proplists:get_value(parse_transform, CompileOpts) of
+ undefined -> [F(F, Fd)];
+ PT -> [PT, F(F, Fd)]
+ end;
+ {ok, {attribute, _, include, Hrl}, _} ->
+ case file:open("$(DEPS_DIR)/$(1)/include/" ++ Hrl, [read]) of
+ {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)];
+ _ ->
+ case file:open("$(DEPS_DIR)/$(1)/src/" ++ Hrl, [read]) of
+ {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)];
+ _ -> [F(F, Fd)]
+ end
+ end;
+ {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} ->
+ {ok, HrlFd} = file:open("$(DEPS_DIR)/$(1)/include/" ++ Hrl, [read]),
+ [F(F, HrlFd), F(F, Fd)];
+ {ok, {attribute, _, include_lib, Hrl}, _} ->
+ case file:open("$(DEPS_DIR)/$(1)/include/" ++ Hrl, [read]) of
+ {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)];
+ _ -> [F(F, Fd)]
+ end;
+ {ok, {attribute, _, import, {Imp, _}}, _} ->
+ case file:open("$(DEPS_DIR)/$(1)/src/" ++ atom_to_list(Imp) ++ ".erl", [read]) of
+ {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)];
+ _ -> [F(F, Fd)]
+ end;
+ {eof, _} ->
+ file:close(Fd),
+ [];
+ _ ->
+ F(F, Fd)
+ end
+ end,
+ fun() ->
+ ErlFiles = filelib:wildcard("$(DEPS_DIR)/$(1)/src/*.erl"),
+ First0 = lists:usort(lists:flatten([begin
+ {ok, Fd} = file:open(F, [read]),
+ FindFirst(FindFirst, Fd)
+ end || F <- ErlFiles])),
+ First = lists:flatten([begin
+ {ok, Fd} = file:open("$(DEPS_DIR)/$(1)/src/" ++ atom_to_list(M) ++ ".erl", [read]),
+ FindFirst(FindFirst, Fd)
+ end || M <- First0, lists:member("$(DEPS_DIR)/$(1)/src/" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0,
+ Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First,
+ lists:member("$(DEPS_DIR)/$(1)/src/" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"])
+ end(),
+ Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+ Write("\npreprocess::\n"),
+ Write("\npre-deps::\n"),
+ Write("\npre-app::\n"),
+ PatchHook = fun(Cmd) ->
+ case Cmd of
+ "make -C" ++ _ -> Escape(Cmd);
+ "gmake -C" ++ _ -> Escape(Cmd);
+ "make " ++ Cmd1 -> "make -f Makefile.orig.mk " ++ Escape(Cmd1);
+ "gmake " ++ Cmd1 -> "gmake -f Makefile.orig.mk " ++ Escape(Cmd1);
+ _ -> Escape(Cmd)
+ end
+ end,
+ fun() ->
+ case lists:keyfind(pre_hooks, 1, Conf) of
+ false -> ok;
+ {_, Hooks} ->
+ [case H of
+ {'get-deps', Cmd} ->
+ Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+ {compile, Cmd} ->
+ Write("\npre-app::\n\tCC=$$$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+ {Regex, compile, Cmd} ->
+ case rebar_utils:is_arch(Regex) of
+ true -> Write("\npre-app::\n\tCC=$$$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+ false -> ok
+ end;
+ _ -> ok
+ end || H <- Hooks]
+ end
+ end(),
+ ShellToMk = fun(V) ->
+ re:replace(re:replace(V, "(\\\\$$$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+ "-Werror\\\\b", "", [{return, list}, global])
+ end,
+ PortSpecs = fun() ->
+ case lists:keyfind(port_specs, 1, Conf) of
+ false ->
+ case filelib:is_dir("$(DEPS_DIR)/$(1)/c_src") of
+ false -> [];
+ true ->
+ [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+ proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+ end;
+ {_, Specs} ->
+ lists:flatten([case S of
+ {Output, Input} -> {ShellToMk(Output), Input, []};
+ {Regex, Output, Input} ->
+ case rebar_utils:is_arch(Regex) of
+ true -> {ShellToMk(Output), Input, []};
+ false -> []
+ end;
+ {Regex, Output, Input, [{env, Env}]} ->
+ case rebar_utils:is_arch(Regex) of
+ true -> {ShellToMk(Output), Input, Env};
+ false -> []
+ end
+ end || S <- Specs])
+ end
+ end(),
+ PortSpecWrite = fun (Text) ->
+ file:write_file("$(DEPS_DIR)/$(1)/c_src/Makefile.erlang.mk", Text, [append])
+ end,
+ case PortSpecs of
+ [] -> ok;
+ _ ->
+ Write("\npre-app::\n\t$$$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+ PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n",
+ [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+ PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n",
+ [code:lib_dir(erl_interface, lib)])),
+ [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+ FilterEnv = fun(Env) ->
+ lists:flatten([case E of
+ {_, _} -> E;
+ {Regex, K, V} ->
+ case rebar_utils:is_arch(Regex) of
+ true -> {K, V};
+ false -> []
+ end
+ end || E <- Env])
+ end,
+ MergeEnv = fun(Env) ->
+ lists:foldl(fun ({K, V}, Acc) ->
+ case lists:keyfind(K, 1, Acc) of
+ false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+ {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+ end
+ end, [], Env)
+ end,
+ PortEnv = case lists:keyfind(port_env, 1, Conf) of
+ false -> [];
+ {_, PortEnv0} -> FilterEnv(PortEnv0)
+ end,
+ PortSpec = fun ({Output, Input0, Env}) ->
+ filelib:ensure_dir("$(DEPS_DIR)/$(1)/" ++ Output),
+ Input = [[" ", I] || I <- Input0],
+ PortSpecWrite([
+ [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+ case $(PLATFORM) of
+ darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+ _ -> ""
+ end,
+ "\n\nall:: ", Output, "\n\n",
+ "%.o: %.c\n\t$$$$\(CC) -c -o $$$$\@ $$$$\< $$$$\(CFLAGS) $$$$\(ERL_CFLAGS) $$$$\(DRV_CFLAGS) $$$$\(EXE_CFLAGS)\n\n",
+ "%.o: %.C\n\t$$$$\(CXX) -c -o $$$$\@ $$$$\< $$$$\(CXXFLAGS) $$$$\(ERL_CFLAGS) $$$$\(DRV_CFLAGS) $$$$\(EXE_CFLAGS)\n\n",
+ "%.o: %.cc\n\t$$$$\(CXX) -c -o $$$$\@ $$$$\< $$$$\(CXXFLAGS) $$$$\(ERL_CFLAGS) $$$$\(DRV_CFLAGS) $$$$\(EXE_CFLAGS)\n\n",
+ "%.o: %.cpp\n\t$$$$\(CXX) -c -o $$$$\@ $$$$\< $$$$\(CXXFLAGS) $$$$\(ERL_CFLAGS) $$$$\(DRV_CFLAGS) $$$$\(EXE_CFLAGS)\n\n",
+ [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+ Output, ": $$$$\(foreach ext,.c .C .cc .cpp,",
+ "$$$$\(patsubst %$$$$\(ext),%.o,$$$$\(filter %$$$$\(ext),$$$$\(wildcard", Input, "))))\n",
+ "\t$$$$\(CC) -o $$$$\@ $$$$\? $$$$\(LDFLAGS) $$$$\(ERL_LDFLAGS) $$$$\(DRV_LDFLAGS) $$$$\(EXE_LDFLAGS)",
+ case filename:extension(Output) of
+ [] -> "\n";
+ _ -> " -shared\n"
+ end])
+ end,
+ [PortSpec(S) || S <- PortSpecs]
+ end,
+ Write("\ninclude ../../erlang.mk"),
+ RunPlugin = fun(Plugin, Step) ->
+ case erlang:function_exported(Plugin, Step, 2) of
+ false -> ok;
+ true ->
+ c:cd("$(DEPS_DIR)/$(1)/"),
+ Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+ dict:store(base_dir, "", dict:new())}, undefined),
+ io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+ end
+ end,
+ fun() ->
+ case lists:keyfind(plugins, 1, Conf) of
+ false -> ok;
+ {_, Plugins} ->
+ [begin
+ case lists:keyfind(deps, 1, Conf) of
+ false -> ok;
+ {_, Deps} ->
+ case lists:keyfind(P, 1, Deps) of
+ false -> ok;
+ _ ->
+ Path = "$(DEPS_DIR)/" ++ atom_to_list(P),
+ io:format("~s", [os:cmd("$(MAKE) -C $(DEPS_DIR)/$(1) " ++ Path)]),
+ io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+ code:add_patha(Path ++ "/ebin")
+ end
+ end
+ end || P <- Plugins],
+ [case code:load_file(P) of
+ {module, P} -> ok;
+ _ ->
+ case lists:keyfind(plugin_dir, 1, Conf) of
+ false -> ok;
+ {_, PluginsDir} ->
+ ErlFile = "$(DEPS_DIR)/$(1)/" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+ {ok, P, Bin} = compile:file(ErlFile, [binary]),
+ {module, P} = code:load_binary(P, ErlFile, Bin)
+ end
+ end || P <- Plugins],
+ [RunPlugin(P, preprocess) || P <- Plugins],
+ [RunPlugin(P, pre_compile) || P <- Plugins]
+ end
+ end(),
+ halt()
+endef
+
+define dep_autopatch_app.erl
+ UpdateModules = fun(App) ->
+ case filelib:is_regular(App) of
+ false -> ok;
+ true ->
+ {ok, [{application, $(1), L0}]} = file:consult(App),
+ Mods = filelib:fold_files("$(DEPS_DIR)/$(1)/src", "\\\\.erl$$$$", true,
+ fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+ L = lists:keystore(modules, 1, L0, {modules, Mods}),
+ ok = file:write_file(App, io_lib:format("~p.~n", [{application, $(1), L}]))
+ end
+ end,
+ UpdateModules("$(DEPS_DIR)/$(1)/ebin/$(1).app"),
+ halt()
+endef
+
+define dep_autopatch_appsrc.erl
+ AppSrcOut = "$(DEPS_DIR)/$(1)/src/$(1).app.src",
+ AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(DEPS_DIR)/$(1)/ebin/$(1).app"; true -> AppSrcOut end,
+ case filelib:is_regular(AppSrcIn) of
+ false -> ok;
+ true ->
+ {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+ L1 = lists:keystore(modules, 1, L0, {modules, []}),
+ L2 = case lists:keyfind(vsn, 1, L1) of {vsn, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+ ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L2}])),
+ case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+ end,
+ halt()
+endef
+
define dep_fetch
if [ "$$$$VS" = "git" ]; then \
- git clone -n -- $$$$REPO $(DEPS_DIR)/$(1); \
+ git clone -q -n -- $$$$REPO $(DEPS_DIR)/$(1); \
cd $(DEPS_DIR)/$(1) && git checkout -q $$$$COMMIT; \
elif [ "$$$$VS" = "hg" ]; then \
- hg clone -U $$$$REPO $(DEPS_DIR)/$(1); \
+ hg clone -q -U $$$$REPO $(DEPS_DIR)/$(1); \
cd $(DEPS_DIR)/$(1) && hg update -q $$$$COMMIT; \
elif [ "$$$$VS" = "svn" ]; then \
- svn checkout $$$$REPO $(DEPS_DIR)/$(1); \
+ svn checkout -q $$$$REPO $(DEPS_DIR)/$(1); \
+ elif [ "$$$$VS" = "cp" ]; then \
+ cp -R $$$$REPO $(DEPS_DIR)/$(1); \
else \
echo "Unknown or invalid dependency: $(1). Please consult the erlang.mk README for instructions." >&2; \
exit 78; \
@@ -205,25 +652,41 @@ $(DEPS_DIR)/$(1):
@mkdir -p $(DEPS_DIR)
ifeq (,$(dep_$(1)))
@if [ ! -f $(PKG_FILE2) ]; then $(call core_http_get,$(PKG_FILE2),$(PKG_FILE_URL)); fi
- @DEPPKG=$$$$(awk 'BEGIN { FS = "\t" }; $$$$1 == "$(1)" { print $$$$2 " " $$$$3 " " $$$$4 }' $(PKG_FILE2);); \
+ $(dep_verbose) DEPPKG=$$$$(awk 'BEGIN { FS = "\t" }; $$$$1 == "$(1)" { print $$$$2 " " $$$$3 " " $$$$4 }' $(PKG_FILE2);); \
VS=$$$$(echo $$$$DEPPKG | cut -d " " -f1); \
REPO=$$$$(echo $$$$DEPPKG | cut -d " " -f2); \
COMMIT=$$$$(echo $$$$DEPPKG | cut -d " " -f3); \
$(call dep_fetch,$(1))
else
- @VS=$(word 1,$(dep_$(1))); \
+ifeq (1,$(words $(dep_$(1))))
+ $(dep_verbose) VS=git; \
+ REPO=$(dep_$(1)); \
+ COMMIT=master; \
+ $(call dep_fetch,$(1))
+else
+ifeq (2,$(words $(dep_$(1))))
+ $(dep_verbose) VS=git; \
+ REPO=$(word 1,$(dep_$(1))); \
+ COMMIT=$(word 2,$(dep_$(1))); \
+ $(call dep_fetch,$(1))
+else
+ $(dep_verbose) VS=$(word 1,$(dep_$(1))); \
REPO=$(word 2,$(dep_$(1))); \
COMMIT=$(word 3,$(dep_$(1))); \
$(call dep_fetch,$(1))
endif
-ifneq ($(filter $(1),$(AUTOPATCH)),)
- $(call dep_autopatch_verbose,$(1)) if [ -f $(DEPS_DIR)/$(1)/rebar.config ]; then \
- $(call dep_autopatch,$(1)); \
- cd $(DEPS_DIR)/$(1)/ && ln -s ../../erlang.mk; \
- elif [ ! -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- echo "ERLC_OPTS = +debug_info\ninclude erlang.mk" > $(DEPS_DIR)/$(1)/Makefile; \
- cd $(DEPS_DIR)/$(1)/ && ln -s ../../erlang.mk; \
+endif
+endif
+ @if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ]; then \
+ echo " AUTO " $(1); \
+ cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+ fi
+ -@if [ -f $(DEPS_DIR)/$(1)/configure ]; then \
+ echo " CONF " $(1); \
+ cd $(DEPS_DIR)/$(1) && ./configure; \
fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+ @$(call dep_autopatch,$(1))
endif
endef
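Editorial note: the rewritten dep fetching above now accepts short dep_* specifications in addition to the original three-word form, and adds a `cp` method for local copies. A sketch of the accepted shapes (repository URLs and versions here are illustrative only):

# 1 word:  git repository, master branch
dep_cowlib = https://github.com/ninenines/cowlib
# 2 words: git repository and a specific commit/tag/branch
dep_ranch = https://github.com/ninenines/ranch 1.1.0
# 3 words: explicit fetch method, repository and commit (git, hg, svn, cp)
dep_gun = git https://github.com/ninenines/gun master
# cp copies a local directory with `cp -R`
dep_local_thing = cp /path/to/local_thing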
@@ -253,7 +716,7 @@ pkg-search: $(PKG_FILE2)
"Description:\t" $$6 "\n" }'
else
pkg-search:
- $(error Usage: make pkg-search q=STRING)
+ $(error Usage: $(MAKE) pkg-search q=STRING)
endif
ifeq ($(PKG_FILE2),$(CURDIR)/.erlang.mk.packages.v2)
@@ -267,6 +730,38 @@ help::
" pkg-list List all known packages" \
" pkg-search q=STRING Search for STRING in the package index"
+# Copyright (c) 2015, Loïc Hoguin <[email protected]>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+ @mkdir -p ebin/ include/
+ $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+ $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+ @rm ebin/*.erl
+endef
+
+define compile_proto.erl
+ [begin
+ Dir = filename:dirname(filename:dirname(F)),
+ protobuffs_compile:generate_source(F,
+ [{output_include_dir, Dir ++ "/include"},
+ {output_src_dir, Dir ++ "/ebin"}])
+ end || F <- string:tokens("$(1)", " ")],
+ halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(shell find src -type f -name \*.proto 2>/dev/null)
+ $(if $(strip $?),$(call compile_proto,$?))
+endif
+
# Copyright (c) 2013-2015, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -297,6 +792,9 @@ erlc_verbose = $(erlc_verbose_$(V))
xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
xyrl_verbose = $(xyrl_verbose_$(V))
+asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
+asn1_verbose = $(asn1_verbose_$(V))
+
mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
mib_verbose = $(mib_verbose_$(V))
@@ -338,6 +836,13 @@ define compile_xyrl
@rm ebin/*.erl
endef
+define compile_asn1
+ $(asn1_verbose) erlc -v -I include/ -o ebin/ $(1)
+ @mv ebin/*.hrl include/
+ @mv ebin/*.asn1db include/
+ @rm ebin/*.erl
+endef
+
define compile_mib
$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ \
-I priv/mibs/ $(COMPILE_MIB_FIRST_PATHS) $(1)
@@ -348,18 +853,22 @@ ifneq ($(wildcard src/),)
ebin/$(PROJECT).app::
@mkdir -p ebin/
+ifneq ($(wildcard asn1/),)
+ebin/$(PROJECT).app:: $(shell find asn1 -type f -name \*.asn1)
+ @mkdir -p include
+ $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
ifneq ($(wildcard mibs/),)
ebin/$(PROJECT).app:: $(shell find mibs -type f -name \*.mib)
@mkdir -p priv/mibs/ include
$(if $(strip $?),$(call compile_mib,$?))
endif
-ebin/$(PROJECT).app:: $(shell find src -type f -name \*.erl) \
- $(shell find src -type f -name \*.core)
+ebin/$(PROJECT).app:: $(shell find src -type f -name \*.erl -o -name \*.core)
$(if $(strip $?),$(call compile_erl,$?))
-ebin/$(PROJECT).app:: $(shell find src -type f -name \*.xrl) \
- $(shell find src -type f -name \*.yrl)
+ebin/$(PROJECT).app:: $(shell find src -type f -name \*.xrl -o -name \*.yrl)
$(if $(strip $?),$(call compile_xyrl,$?))
endif
@@ -369,6 +878,26 @@ clean-app:
$(gen_verbose) rm -rf ebin/ priv/mibs/ \
$(addprefix include/,$(addsuffix .hrl,$(notdir $(basename $(wildcard mibs/*.mib)))))
+# Copyright (c) 2015, Viktor Söderqvist <[email protected]>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+ @for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
# Copyright (c) 2015, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -376,7 +905,7 @@ clean-app:
# Configuration.
-TEST_DIR ?= test
+TEST_DIR ?= $(CURDIR)/test
ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
@@ -387,8 +916,12 @@ TEST_ERLC_OPTS += -DTEST=1
$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
test-deps: $(ALL_TEST_DEPS_DIRS)
@for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
ifneq ($(strip $(TEST_DIR)),)
test-dir:
@@ -397,13 +930,13 @@ test-dir:
endif
ifeq ($(wildcard ebin/test),)
-test-build: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build: clean deps test-deps
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
@$(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
$(gen_verbose) touch ebin/test
else
-test-build: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build: deps test-deps
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps
@$(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
endif
@@ -414,6 +947,53 @@ ifneq ($(wildcard $(TEST_DIR)/*.beam),)
$(gen_verbose) rm -f $(TEST_DIR)/*.beam
endif
+# Copyright (c) 2015, Loïc Hoguin <[email protected]>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide:
+ a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+ a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual:
+ for f in doc/src/manual/*.asciidoc ; do \
+ a2x -v -f manpage $$f ; \
+ done
+ for s in $(MAN_SECTIONS); do \
+ mkdir -p doc/man$$s/ ; \
+ mv doc/src/manual/*.$$s doc/man$$s/ ; \
+ gzip doc/man$$s/*.$$s ; \
+ done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+ for s in $(MAN_SECTIONS); do \
+ mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+ install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+ done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+ $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
# Copyright (c) 2014-2015, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -432,285 +1012,337 @@ help::
# Bootstrap templates.
-bs_appsrc = "{application, $(PROJECT), [" \
- " {description, \"\"}," \
- " {vsn, \"0.1.0\"}," \
- " {id, \"git\"}," \
- " {modules, []}," \
- " {registered, []}," \
- " {applications, [" \
- " kernel," \
- " stdlib" \
- " ]}," \
- " {mod, {$(PROJECT)_app, []}}," \
- " {env, []}" \
- "]}."
-bs_appsrc_lib = "{application, $(PROJECT), [" \
- " {description, \"\"}," \
- " {vsn, \"0.1.0\"}," \
- " {id, \"git\"}," \
- " {modules, []}," \
- " {registered, []}," \
- " {applications, [" \
- " kernel," \
- " stdlib" \
- " ]}" \
- "]}."
-bs_Makefile = "PROJECT = $(PROJECT)" \
- "include erlang.mk"
-bs_app = "-module($(PROJECT)_app)." \
- "-behaviour(application)." \
- "" \
- "-export([start/2])." \
- "-export([stop/1])." \
- "" \
- "start(_Type, _Args) ->" \
- " $(PROJECT)_sup:start_link()." \
- "" \
- "stop(_State) ->" \
- " ok."
-bs_relx_config = "{release, {$(PROJECT)_release, \"1\"}, [$(PROJECT)]}." \
- "{extended_start_script, true}." \
- "{sys_config, \"rel/sys.config\"}." \
- "{vm_args, \"rel/vm.args\"}."
-bs_sys_config = "[" \
- "]."
-bs_vm_args = "-name $(PROJECT)@127.0.0.1" \
- "-setcookie $(PROJECT)" \
- "-heart"
+define bs_appsrc
+{application, $(PROJECT), [
+ {description, ""},
+ {vsn, "0.1.0"},
+ {id, "git"},
+ {modules, []},
+ {registered, []},
+ {applications, [
+ kernel,
+ stdlib
+ ]},
+ {mod, {$(PROJECT)_app, []}},
+ {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $(PROJECT), [
+ {description, ""},
+ {vsn, "0.1.0"},
+ {id, "git"},
+ {modules, []},
+ {registered, []},
+ {applications, [
+ kernel,
+ stdlib
+ ]}
+]}.
+endef
+
+define bs_Makefile
+PROJECT = $(PROJECT)
+include erlang.mk
+endef
+
+define bs_app
+-module($(PROJECT)_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+ $(PROJECT)_sup:start_link().
+
+stop(_State) ->
+ ok.
+endef
+
+define bs_relx_config
+{release, {$(PROJECT)_release, "1"}, [$(PROJECT)]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $(PROJECT)@127.0.0.1
+-setcookie $(PROJECT)
+-heart
+endef
+
# Normal templates.
-tpl_supervisor = "-module($(n))." \
- "-behaviour(supervisor)." \
- "" \
- "-export([start_link/0])." \
- "-export([init/1])." \
- "" \
- "start_link() ->" \
- " supervisor:start_link({local, ?MODULE}, ?MODULE, [])." \
- "" \
- "init([]) ->" \
- " Procs = []," \
- " {ok, {{one_for_one, 1, 5}, Procs}}."
-tpl_gen_server = "-module($(n))." \
- "-behaviour(gen_server)." \
- "" \
- "%% API." \
- "-export([start_link/0])." \
- "" \
- "%% gen_server." \
- "-export([init/1])." \
- "-export([handle_call/3])." \
- "-export([handle_cast/2])." \
- "-export([handle_info/2])." \
- "-export([terminate/2])." \
- "-export([code_change/3])." \
- "" \
- "-record(state, {" \
- "})." \
- "" \
- "%% API." \
- "" \
- "-spec start_link() -> {ok, pid()}." \
- "start_link() ->" \
- " gen_server:start_link(?MODULE, [], [])." \
- "" \
- "%% gen_server." \
- "" \
- "init([]) ->" \
- " {ok, \#state{}}." \
- "" \
- "handle_call(_Request, _From, State) ->" \
- " {reply, ignored, State}." \
- "" \
- "handle_cast(_Msg, State) ->" \
- " {noreply, State}." \
- "" \
- "handle_info(_Info, State) ->" \
- " {noreply, State}." \
- "" \
- "terminate(_Reason, _State) ->" \
- " ok." \
- "" \
- "code_change(_OldVsn, State, _Extra) ->" \
- " {ok, State}."
-tpl_gen_fsm = "-module($(n))." \
- "-behaviour(gen_fsm)." \
- "" \
- "%% API." \
- "-export([start_link/0])." \
- "" \
- "%% gen_fsm." \
- "-export([init/1])." \
- "-export([state_name/2])." \
- "-export([handle_event/3])." \
- "-export([state_name/3])." \
- "-export([handle_sync_event/4])." \
- "-export([handle_info/3])." \
- "-export([terminate/3])." \
- "-export([code_change/4])." \
- "" \
- "-record(state, {" \
- "})." \
- "" \
- "%% API." \
- "" \
- "-spec start_link() -> {ok, pid()}." \
- "start_link() ->" \
- " gen_fsm:start_link(?MODULE, [], [])." \
- "" \
- "%% gen_fsm." \
- "" \
- "init([]) ->" \
- " {ok, state_name, \#state{}}." \
- "" \
- "state_name(_Event, StateData) ->" \
- " {next_state, state_name, StateData}." \
- "" \
- "handle_event(_Event, StateName, StateData) ->" \
- " {next_state, StateName, StateData}." \
- "" \
- "state_name(_Event, _From, StateData) ->" \
- " {reply, ignored, state_name, StateData}." \
- "" \
- "handle_sync_event(_Event, _From, StateName, StateData) ->" \
- " {reply, ignored, StateName, StateData}." \
- "" \
- "handle_info(_Info, StateName, StateData) ->" \
- " {next_state, StateName, StateData}." \
- "" \
- "terminate(_Reason, _StateName, _StateData) ->" \
- " ok." \
- "" \
- "code_change(_OldVsn, StateName, StateData, _Extra) ->" \
- " {ok, StateName, StateData}."
-tpl_cowboy_http = "-module($(n))." \
- "-behaviour(cowboy_http_handler)." \
- "" \
- "-export([init/3])." \
- "-export([handle/2])." \
- "-export([terminate/3])." \
- "" \
- "-record(state, {" \
- "})." \
- "" \
- "init(_, Req, _Opts) ->" \
- " {ok, Req, \#state{}}." \
- "" \
- "handle(Req, State=\#state{}) ->" \
- " {ok, Req2} = cowboy_req:reply(200, Req)," \
- " {ok, Req2, State}." \
- "" \
- "terminate(_Reason, _Req, _State) ->" \
- " ok."
-tpl_cowboy_loop = "-module($(n))." \
- "-behaviour(cowboy_loop_handler)." \
- "" \
- "-export([init/3])." \
- "-export([info/3])." \
- "-export([terminate/3])." \
- "" \
- "-record(state, {" \
- "})." \
- "" \
- "init(_, Req, _Opts) ->" \
- " {loop, Req, \#state{}, 5000, hibernate}." \
- "" \
- "info(_Info, Req, State) ->" \
- " {loop, Req, State, hibernate}." \
- "" \
- "terminate(_Reason, _Req, _State) ->" \
- " ok."
-tpl_cowboy_rest = "-module($(n))." \
- "" \
- "-export([init/3])." \
- "-export([content_types_provided/2])." \
- "-export([get_html/2])." \
- "" \
- "init(_, _Req, _Opts) ->" \
- " {upgrade, protocol, cowboy_rest}." \
- "" \
- "content_types_provided(Req, State) ->" \
- " {[{{<<\"text\">>, <<\"html\">>, '*'}, get_html}], Req, State}." \
- "" \
- "get_html(Req, State) ->" \
- " {<<\"<html><body>This is REST!</body></html>\">>, Req, State}."
-tpl_cowboy_ws = "-module($(n))." \
- "-behaviour(cowboy_websocket_handler)." \
- "" \
- "-export([init/3])." \
- "-export([websocket_init/3])." \
- "-export([websocket_handle/3])." \
- "-export([websocket_info/3])." \
- "-export([websocket_terminate/3])." \
- "" \
- "-record(state, {" \
- "})." \
- "" \
- "init(_, _, _) ->" \
- " {upgrade, protocol, cowboy_websocket}." \
- "" \
- "websocket_init(_, Req, _Opts) ->" \
- " Req2 = cowboy_req:compact(Req)," \
- " {ok, Req2, \#state{}}." \
- "" \
- "websocket_handle({text, Data}, Req, State) ->" \
- " {reply, {text, Data}, Req, State};" \
- "websocket_handle({binary, Data}, Req, State) ->" \
- " {reply, {binary, Data}, Req, State};" \
- "websocket_handle(_Frame, Req, State) ->" \
- " {ok, Req, State}." \
- "" \
- "websocket_info(_Info, Req, State) ->" \
- " {ok, Req, State}." \
- "" \
- "websocket_terminate(_Reason, _Req, _State) ->" \
- " ok."
-tpl_ranch_protocol = "-module($(n))." \
- "-behaviour(ranch_protocol)." \
- "" \
- "-export([start_link/4])." \
- "-export([init/4])." \
- "" \
- "-type opts() :: []." \
- "-export_type([opts/0])." \
- "" \
- "-record(state, {" \
- " socket :: inet:socket()," \
- " transport :: module()" \
- "})." \
- "" \
- "start_link(Ref, Socket, Transport, Opts) ->" \
- " Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts])," \
- " {ok, Pid}." \
- "" \
- "-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok." \
- "init(Ref, Socket, Transport, _Opts) ->" \
- " ok = ranch:accept_ack(Ref)," \
- " loop(\#state{socket=Socket, transport=Transport})." \
- "" \
- "loop(State) ->" \
- " loop(State)."
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+ Procs = [],
+ {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+ gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+ {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+ {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+ {noreply, State}.
+
+handle_info(_Info, State) ->
+ {noreply, State}.
+
+terminate(_Reason, _State) ->
+ ok.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+ {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+ {ok, Req2} = cowboy_req:reply(200, Req),
+ {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+ ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+ gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+ {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+ {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+ {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+ {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+ {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+ {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+ ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+ {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+ {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+ {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+ ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+ {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+ {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+ {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+ {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+ Req2 = cowboy_req:compact(Req),
+ {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+ {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+ {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+ {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+ {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+ ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+ socket :: inet:socket(),
+ transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+ Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+ {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+ ok = ranch:accept_ack(Ref),
+ loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+ loop(State).
+endef
# Plugin-specific targets.
+define render_template
+ @echo "$${$(1)}" > $(2)
+endef
+
+$(foreach template,$(filter bs_%,$(.VARIABLES)),$(eval export $(template)))
+$(foreach template,$(filter tpl_%,$(.VARIABLES)),$(eval export $(template)))
+
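Editorial note: with the templates now stored in define blocks and exported above, a project Makefile can presumably provide its own tpl_* variable before including erlang.mk and have it rendered by the same machinery, e.g. `make new t=my_worker n=foo_worker`. A hypothetical sketch:

# Sketch only, not part of this commit: a user-defined template.
define tpl_my_worker
-module($(n)).
-export([start_link/0]).

start_link() ->
	{ok, spawn_link(fun() -> receive stop -> ok end end)}.
endef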
bootstrap:
ifneq ($(wildcard src/),)
$(error Error: src/ directory already exists)
endif
- @printf "%s\n" $(bs_Makefile) > Makefile
+ $(call render_template,bs_Makefile,Makefile)
@mkdir src/
- @printf "%s\n" $(bs_appsrc) > src/$(PROJECT).app.src
- @printf "%s\n" $(bs_app) > src/$(PROJECT)_app.erl
+ $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+ $(call render_template,bs_app,src/$(PROJECT)_app.erl)
$(eval n := $(PROJECT)_sup)
- @printf "%s\n" $(tpl_supervisor) > src/$(PROJECT)_sup.erl
+ $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
bootstrap-lib:
ifneq ($(wildcard src/),)
$(error Error: src/ directory already exists)
endif
- @printf "%s\n" $(bs_Makefile) > Makefile
+ $(call render_template,bs_Makefile,Makefile)
@mkdir src/
- @printf "%s\n" $(bs_appsrc_lib) > src/$(PROJECT).app.src
+ $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
bootstrap-rel:
ifneq ($(wildcard relx.config),)
@@ -719,25 +1351,25 @@ endif
ifneq ($(wildcard rel/),)
$(error Error: rel/ directory already exists)
endif
- @printf "%s\n" $(bs_relx_config) > relx.config
+ $(call render_template,bs_relx_config,relx.config)
@mkdir rel/
- @printf "%s\n" $(bs_sys_config) > rel/sys.config
- @printf "%s\n" $(bs_vm_args) > rel/vm.args
+ $(call render_template,bs_sys_config,rel/sys.config)
+ $(call render_template,bs_vm_args,rel/vm.args)
new:
ifeq ($(wildcard src/),)
$(error Error: src/ directory does not exist)
endif
ifndef t
- $(error Usage: make new t=TEMPLATE n=NAME)
+ $(error Usage: $(MAKE) new t=TEMPLATE n=NAME)
endif
ifndef tpl_$(t)
$(error Unknown template)
endif
ifndef n
- $(error Usage: make new t=TEMPLATE n=NAME)
+ $(error Usage: $(MAKE) new t=TEMPLATE n=NAME)
endif
- @printf "%s\n" $(tpl_$(t)) > src/$(n).erl
+ $(call render_template,tpl_$(t),src/$(n).erl)
list-templates:
@echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
@@ -746,27 +1378,26 @@ list-templates:
# This file is part of erlang.mk and subject to the terms of the ISC License.
.PHONY: clean-c_src distclean-c_src-env
-# todo
# Configuration.
-C_SRC_DIR = $(CURDIR)/c_src
+C_SRC_DIR ?= $(CURDIR)/c_src
C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so
+C_SRC_TYPE ?= shared
# System type and C compiler/flags.
-UNAME_SYS := $(shell uname -s)
-ifeq ($(UNAME_SYS), Darwin)
+ifeq ($(PLATFORM),darwin)
CC ?= cc
CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(UNAME_SYS), FreeBSD)
+else ifeq ($(PLATFORM),freebsd)
CC ?= cc
CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(UNAME_SYS), Linux)
+else ifeq ($(PLATFORM),linux)
CC ?= gcc
CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
CXXFLAGS ?= -O3 -finline-functions -Wall
@@ -776,7 +1407,10 @@ CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR)
CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR)
LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei
+
+ifeq ($(C_SRC_TYPE),shared)
LDFLAGS += -shared
+endif
# Verbosity.
@@ -793,14 +1427,21 @@ link_verbose = $(link_verbose_$(V))
ifeq ($(wildcard $(C_SRC_DIR)),)
else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app::
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
$(MAKE) -C $(C_SRC_DIR)
clean::
$(MAKE) -C $(C_SRC_DIR) clean
else
+
+ifeq ($(SOURCES),)
SOURCES := $(shell find $(C_SRC_DIR) -type f \( -name "*.c" -o -name "*.C" -o -name "*.cc" -o -name "*.cpp" \))
+endif
OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
@@ -808,6 +1449,8 @@ COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
app:: $(C_SRC_ENV) $(C_SRC_OUTPUT)
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT)
+
$(C_SRC_OUTPUT): $(OBJECTS)
@mkdir -p priv/
$(link_verbose) $(CC) $(OBJECTS) $(LDFLAGS) $(LDLIBS) -o $(C_SRC_OUTPUT)
@@ -824,6 +1467,14 @@ $(C_SRC_OUTPUT): $(OBJECTS)
%.o: %.cpp
$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+clean:: clean-c_src
+
+clean-c_src:
+ $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
$(C_SRC_ENV):
@$(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \
io_lib:format( \
@@ -835,11 +1486,6 @@ $(C_SRC_ENV):
code:lib_dir(erl_interface, lib)])), \
halt()."
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS)
-
distclean:: distclean-c_src-env
distclean-c_src-env:
@@ -848,6 +1494,72 @@ distclean-c_src-env:
-include $(C_SRC_ENV)
endif
+# Copyright (c) 2015, Loïc Hoguin <[email protected]>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(KERL) $(addprefix ci-,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+ -$(ci_verbose) \
+ PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+ CI_OTP_RELEASE="$(1)" \
+ CT_OPTS="-label $(1)" \
+ $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+$(CI_INSTALL_DIR)/$(1):
+ $(KERL) build git $(OTP_GIT) $(1) $(1)
+ $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+define kerl_fetch
+ $(call core_http_get,$(KERL),$(KERL_URL))
+ chmod +x $(KERL)
+endef
+
+$(KERL):
+ @$(call kerl_fetch)
+
+help::
+ @printf "%s\n" "" \
+ "Continuous Integration targets:" \
+ " ci Run '$(MAKE) tests' on all configured Erlang versions." \
+ "" \
+ "The CI_OTP variable must be defined with the Erlang versions" \
+ "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+ $(gen_verbose) rm -rf $(KERL)
+endif
+
# Copyright (c) 2013-2015, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -881,28 +1593,28 @@ help::
CT_RUN = ct_run \
-no_auto_compile \
-noinput \
- -pa ebin $(DEPS_DIR)/*/ebin \
+ -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \
-dir $(TEST_DIR) \
- -logdir logs
+ -logdir $(CURDIR)/logs
ifeq ($(CT_SUITES),)
ct:
else
ct: test-build
- @mkdir -p logs/
+ @mkdir -p $(CURDIR)/logs/
$(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
endif
define ct_suite_target
ct-$(1): test-build
- @mkdir -p logs/
+ @mkdir -p $(CURDIR)/logs/
$(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS)
endef
$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
distclean-ct:
- $(gen_verbose) rm -rf logs/
+ $(gen_verbose) rm -rf $(CURDIR)/logs/
# Copyright (c) 2013-2015, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -921,6 +1633,8 @@ DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \
# Core targets.
+check:: dialyze
+
distclean:: distclean-plt
help::
@@ -946,35 +1660,6 @@ dialyze: $(DIALYZER_PLT)
endif
@dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS)
-# Copyright (c) 2013-2015, Loïc Hoguin <[email protected]>
-# Copyright (c) 2015, Viktor Söderqvist <[email protected]>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc build-doc-deps
-
-# Configuration.
-
-EDOC_OPTS ?=
-
-# Core targets.
-
-docs:: distclean-edoc build-doc-deps
- $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-build-doc-deps: $(DOC_DEPS_DIRS)
- @for dep in $(DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-
-distclean-edoc:
- $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
-
# Copyright (c) 2014, Juan Facorro <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -1120,22 +1805,19 @@ distclean-escript:
# Configuration
+# All modules in TEST_DIR
ifeq ($(strip $(TEST_DIR)),)
-TAGGED_EUNIT_TESTS = {dir,"ebin"}
-else
-ifeq ($(wildcard $(TEST_DIR)),)
-TAGGED_EUNIT_TESTS = {dir,"ebin"}
+TEST_DIR_MODS =
else
-# All modules in TEST_DIR
TEST_DIR_MODS = $(notdir $(basename $(shell find $(TEST_DIR) -type f -name *.beam)))
+endif
+
# All modules in 'ebin'
EUNIT_EBIN_MODS = $(notdir $(basename $(shell find ebin -type f -name *.beam)))
# Only those modules in TEST_DIR with no matching module in 'ebin'.
# This is done to avoid some tests being executed twice.
EUNIT_MODS = $(filter-out $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(TEST_DIR_MODS))
-TAGGED_EUNIT_TESTS = {dir,"ebin"} $(foreach mod,$(EUNIT_MODS),$(shell echo $(mod) | sed -e 's/\(.*\)/{module,\1}/g'))
-endif
-endif
+TAGGED_EUNIT_TESTS = $(foreach mod,$(EUNIT_EBIN_MODS) $(EUNIT_MODS),{module,$(mod)})
EUNIT_OPTS ?=
@@ -1156,10 +1838,16 @@ help::
# Plugin-specific targets.
+EUNIT_RUN_BEFORE ?=
+EUNIT_RUN_AFTER ?=
EUNIT_RUN = $(ERL) \
-pa $(TEST_DIR) $(DEPS_DIR)/*/ebin \
-pz ebin \
- -eval 'case eunit:test([$(call str-join,$(TAGGED_EUNIT_TESTS))], [$(EUNIT_OPTS)]) of ok -> halt(0); error -> halt(1) end.'
+ $(EUNIT_RUN_BEFORE) \
+ -eval 'case eunit:test([$(call str-join,$(TAGGED_EUNIT_TESTS))],\
+ [$(EUNIT_OPTS)]) of ok -> ok; error -> halt(1) end.' \
+ $(EUNIT_RUN_AFTER) \
+ -eval 'halt(0).'
eunit: test-build
$(gen_verbose) $(EUNIT_RUN)
@@ -1167,7 +1855,7 @@ eunit: test-build
# Copyright (c) 2013-2015, Loïc Hoguin <[email protected]>
# This file is part of erlang.mk and subject to the terms of the ISC License.
-.PHONY: relx-rel distclean-relx-rel distclean-relx
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
# Configuration.
@@ -1176,7 +1864,7 @@ RELX_CONFIG ?= $(CURDIR)/relx.config
RELX ?= $(CURDIR)/relx
export RELX
-RELX_URL ?= https://github.com/erlware/relx/releases/download/v1.2.0/relx
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v2.0.0/relx
RELX_OPTS ?=
RELX_OUTPUT_DIR ?= _rel
@@ -1188,9 +1876,11 @@ endif
# Core targets.
+ifeq ($(IS_DEP),)
ifneq ($(wildcard $(RELX_CONFIG)),)
rel:: distclean-relx-rel relx-rel
endif
+endif
distclean:: distclean-relx-rel distclean-relx
@@ -1213,6 +1903,31 @@ distclean-relx-rel:
distclean-relx:
$(gen_verbose) rm -rf $(RELX)
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+ {ok, Config} = file:consult("$(RELX_CONFIG)"),
+ {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+ io:format("~s", [Name]),
+ halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+ @$(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+ @printf "%s\n" "" \
+ "Relx targets:" \
+ " run Compile the project, build the release and run it"
+
+endif
+
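Editorial note: the new run target resolves the release name by consulting relx.config through get_relx_release.erl. With the bootstrap relx.config shown earlier in this diff and the default RELX_OUTPUT_DIR, `make run` therefore ends up executing roughly the following (sketch only):

# RELX_RELEASE resolves to "$(PROJECT)_release" for a bootstrapped project,
# so `make run` starts:
#   _rel/$(PROJECT)_release/bin/$(PROJECT)_release console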
# Copyright (c) 2014, M Robert Martin <[email protected]>
# This file is contributed to erlang.mk and subject to the terms of the ISC License.
@@ -1230,7 +1945,7 @@ ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
help::
@printf "%s\n" "" \
"Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
+ " shell Run an erlang shell with SHELL_OPTS or reasonable default"
# Plugin-specific targets.
@@ -1252,24 +1967,215 @@ ifneq ($(wildcard $(DEPS_DIR)/triq),)
tests:: triq
-define triq_run
-$(ERL) -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin \
- -eval "try $(1) of true -> halt(0); _ -> halt(1) catch error:undef -> io:format(\"Undefined property or module~n\"), halt() end."
+define triq_check.erl
+ code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+ try
+ case $(1) of
+ all -> [true] =:= lists:usort([triq:check(M) || M <- [$(MODULES)]]);
+ module -> triq:check($(2));
+ function -> triq:check($(2))
+ end
+ of
+ true -> halt(0);
+ _ -> halt(1)
+ catch error:undef ->
+ io:format("Undefined property or module~n"),
+ halt(0)
+ end.
endef
ifdef t
ifeq (,$(findstring :,$(t)))
triq: test-build
- @$(call triq_run,triq:check($(t)))
+ @$(call erlang,$(call triq_check.erl,module,$(t)))
else
triq: test-build
@echo Testing $(t)/0
- @$(call triq_run,triq:check($(t)()))
+ @$(call erlang,$(call triq_check.erl,function,$(t)()))
endif
else
triq: test-build
$(eval MODULES := $(shell find ebin -type f -name \*.beam \
| sed "s/ebin\//'/;s/\.beam/',/" | sed '$$s/.$$//'))
- $(gen_verbose) $(call triq_run,[true] =:= lists:usort([triq:check(M) || M <- [$(MODULES)]]))
+ $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined))
+endif
+endif
+
+# Copyright (c) 2015, Euen Lopez <[email protected]>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+ XREF_ARGS :=
+else
+ XREF_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.0/xrefr
+
+# Core targets.
+
+help::
+ @printf "%s\n" "" \
+ "Xref targets:" \
+		"  xref        Run xrefr using XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+ @$(call core_http_get,$(XREFR),$(XREFR_URL))
+ @chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+	$(gen_verbose) $(XREFR) $(XREF_ARGS)
+
+distclean-xref:
+ $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <[email protected]>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# utility variables for representing special symbols
+empty :=
+space := $(empty) $(empty)
+comma := ,
+
+# Hook in coverage to eunit
+
+ifdef COVER
+ifdef EUNIT_RUN
+EUNIT_RUN_BEFORE += -eval \
+ 'case cover:compile_beam_directory("ebin") of \
+ {error, _} -> halt(1); \
+ _ -> ok \
+ end.'
+EUNIT_RUN_AFTER += -eval 'cover:export("eunit.coverdata").'
+endif
+endif
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(shell echo ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+ @echo Cover mods: $(COVER_MODS)
+ $(gen_verbose) printf "%s\n" \
+ '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+ '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+ @$(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
endif
+
+help::
+ @printf "%s\n" "" \
+ "Cover targets:" \
+		"  cover-report  Generate an HTML coverage report from previously collected" \
+ " cover data." \
+ " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+ "" \
+		"If COVER=1 is set, coverage data is generated by the eunit and ct targets. The" \
+		"tests target additionally generates an HTML coverage report from the combined" \
+ "coverdata files from each of these testing tools. HTML reports can be disabled" \
+ "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+ $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+ $(gen_verbose) $(ERL) -eval ' \
+ $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+ cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+ $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+ grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+ | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+ $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+ Ms = cover:imported_modules(),
+ [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+ ++ ".COVER.html", [html]) || M <- Ms],
+ Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+ EunitHrlMods = [$(EUNIT_HRL_MODS)],
+ Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+ true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+ TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+ TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+ TotalPerc = round(100 * TotalY / (TotalY + TotalN)),
+ {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+ io:format(F, "<!DOCTYPE html><html>~n"
+ "<head><meta charset=\"UTF-8\">~n"
+ "<title>Coverage report</title></head>~n"
+ "<body>~n", []),
+ io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+ io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+ [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+ "<td>~p%</td></tr>~n",
+ [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1],
+ How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+ Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+ io:format(F, "</table>~n"
+ "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+ "</body></html>", [How, Date]),
+ halt().
+endef
+
+cover-report:
+ $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+ $(gen_verbose) $(call erlang,$(cover_report.erl))
+
endif
+endif # ifneq ($(COVER_REPORT_DIR),)
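
Following the help text above, a typical coverage workflow looks like this (a sketch):

    $ make tests COVER=1   # run eunit and ct with coverage, then generate cover/index.html
    $ make all.coverdata   # optionally merge eunit.coverdata and ct.coverdata into one file
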
diff --git a/src/cowboy.erl b/src/cowboy.erl
index af9f1b3..1f0e2a9 100644
--- a/src/cowboy.erl
+++ b/src/cowboy.erl
@@ -17,9 +17,13 @@
-export([start_http/4]).
-export([start_https/4]).
-export([start_spdy/4]).
+-export([start_tls/4]).
-export([stop_listener/1]).
-export([set_env/3]).
+-type opts() :: map().
+-export_type([opts/0]).
+
-type fields() :: [atom()
| {atom(), cowboy_constraints:constraint() | [cowboy_constraints:constraint()]}
| {atom(), cowboy_constraints:constraint() | [cowboy_constraints:constraint()], any()}].
@@ -64,6 +68,18 @@ start_spdy(Ref, NbAcceptors, TransOpts, ProtoOpts)
ranch:start_listener(Ref, NbAcceptors,
ranch_ssl, TransOpts2, cowboy_spdy, ProtoOpts).
+-spec start_tls(ranch:ref(), non_neg_integer(), ranch_ssl:opts(), opts()) -> {ok, pid()} | {error, any()}.
+start_tls(Ref, NbAcceptors, TransOpts0, ProtoOpts)
+ when is_integer(NbAcceptors), NbAcceptors > 0 ->
+ {_, Type} = maps:get(stream_handler, ProtoOpts, {cowboy_stream_h, supervisor}),
+ TransOpts = [
+ {connection_type, Type},
+ {next_protocols_advertised, [<<"h2">>, <<"spdy/3">>, <<"http/1.1">>]},
+ {alpn_preferred_protocols, [<<"h2">>, <<"spdy/3">>, <<"http/1.1">>]}
+ |TransOpts0],
+ ranch:start_listener(Ref, NbAcceptors,
+ ranch_ssl, TransOpts, cowboy_tls, ProtoOpts).
+
-spec stop_listener(ranch:ref()) -> ok | {error, not_found}.
stop_listener(Ref) ->
ranch:stop_listener(Ref).
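
A minimal sketch of starting a listener through the new start_tls/4 entry point (the port, certificate paths and the my_stream_h module are placeholder assumptions, not part of this commit; omitting stream_handler falls back to {cowboy_stream_h, supervisor}):

    {ok, _} = cowboy:start_tls(example_tls, 100,
        [{port, 8443},
         {certfile, "priv/ssl/cert.pem"},
         {keyfile, "priv/ssl/key.pem"}],
        #{stream_handler => {my_stream_h, supervisor}}).

The connection_type and ALPN/NPN options are prepended automatically, so clients that negotiate h2 end up in cowboy_http2 via cowboy_tls.
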
diff --git a/src/cowboy_http2.erl b/src/cowboy_http2.erl
new file mode 100644
index 0000000..e0fe30e
--- /dev/null
+++ b/src/cowboy_http2.erl
@@ -0,0 +1,452 @@
+%% Copyright (c) 2015, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_http2).
+
+-export([init/6]).
+
+-export([system_continue/3]).
+-export([system_terminate/4]).
+-export([system_code_change/4]).
+
+-record(stream, {
+ id = undefined :: cowboy_stream:streamid(),
+ state = undefined :: any(),
+ %% Whether we finished sending data.
+ local = nofin :: cowboy_stream:fin(),
+ %% Whether we finished receiving data.
+ remote = nofin :: cowboy_stream:fin()
+}).
+
+-type stream() :: #stream{}.
+
+%% @todo priority: if we receive a message for a stream, do a selective receive
+%% to get all messages in the mailbox and prioritize them. (later)
+
+-record(state, {
+ parent = undefined :: pid(),
+ ref :: ranch:ref(),
+ socket = undefined :: inet:socket(),
+ transport :: module(),
+ opts = #{} :: map(),
+ handler :: module(),
+
+ %% Settings are separate for each endpoint. In addition, settings
+ %% must be acknowledged before they can be expected to be applied.
+ %%
+ %% @todo Since the ack is required, we must timeout if we don't receive it.
+ %% @todo I haven't put as much thought as I should have on this,
+ %% the final settings handling will be very different.
+ local_settings = #{} :: map(),
+ next_settings = #{} :: undefined | map(), %% @todo perhaps set to undefined by default
+ remote_settings = #{} :: map(),
+
+ %% Stream identifiers.
+ server_streamid = 2 :: pos_integer(),
+ %% @todo last known good streamid
+
+ %% Currently active HTTP/2 streams. Streams may be initiated either
+ %% by the client or by the server through PUSH_PROMISE frames.
+ streams = [] :: [stream()],
+
+ %% Streams can spawn zero or more children which are then managed
+ %% by this module if operating as a supervisor.
+ children = [] :: [{pid(), cowboy_stream:streamid()}],
+
+ %% The client starts by sending a sequence of bytes as a preface,
+ %% followed by a potentially empty SETTINGS frame. Then the connection
+ %% is established and continues normally. An exception is when a HEADERS
+ %% frame is sent followed by CONTINUATION frames: no other frame can be
+ %% sent in between.
+ parse_state = preface :: preface | settings | normal
+ | {continuation, cowboy_stream:streamid(), cowboy_stream:fin(), binary()},
+
+ %% HPACK decoding and encoding state.
+ decode_state = cow_hpack:init() :: cow_hpack:state(),
+ encode_state = cow_hpack:init() :: cow_hpack:state()
+}).
+
+-spec init(pid(), ranch:ref(), inet:socket(), module(), cowboy:opts(), module()) -> ok.
+init(Parent, Ref, Socket, Transport, Opts, Handler) ->
+ before_loop(#state{parent=Parent, ref=Ref, socket=Socket,
+ transport=Transport, opts=Opts, handler=Handler}, <<>>).
+
+%% @todo Add a timeout based on when we last heard from the connection.
+before_loop(State, Buffer) ->
+ loop(State, Buffer).
+
+loop(State=#state{parent=Parent, socket=Socket, transport=Transport,
+ handler=Handler, children=Children}, Buffer) ->
+ Transport:setopts(Socket, [{active, once}]),
+ {OK, Closed, Error} = Transport:messages(),
+ receive
+ %% Socket messages.
+ {OK, Socket, Data} ->
+ parse(State, << Buffer/binary, Data/binary >>);
+ {Closed, Socket} ->
+ terminate(State, {socket_error, closed, 'The socket has been closed.'});
+ {Error, Socket, Reason} ->
+ terminate(State, {socket_error, Reason, 'An error has occurred on the socket.'});
+ %% System messages.
+ {'EXIT', Parent, Reason} ->
+ exit(Reason);
+ {system, From, Request} ->
+ sys:handle_system_msg(Request, From, Parent, ?MODULE, [], {State, Buffer});
+ %% Messages pertaining to a stream.
+ {{Handler, StreamID}, Msg} ->
+ loop(info(State, StreamID, Msg), Buffer);
+ %% Exit signal from children.
+ Msg = {'EXIT', Pid, _} ->
+ loop(down(State, Pid, Msg), Buffer);
+ %% Calls from supervisor module.
+ {'$gen_call', {From, Tag}, which_children} ->
+ Workers = [{?MODULE, Pid, worker, [?MODULE]} || {Pid, _} <- Children],
+ From ! {Tag, Workers},
+ loop(State, Buffer);
+ {'$gen_call', {From, Tag}, count_children} ->
+ NbChildren = length(Children),
+ Counts = [{specs, 1}, {active, NbChildren},
+ {supervisors, 0}, {workers, NbChildren}],
+ From ! {Tag, Counts},
+ loop(State, Buffer);
+ {'$gen_call', {From, Tag}, _} ->
+ From ! {Tag, {error, ?MODULE}},
+ loop(State, Buffer);
+ Msg ->
+ error_logger:error_msg("Received stray message ~p.", [Msg]),
+ loop(State, Buffer)
+ %% @todo Configurable timeout.
+ after 60000 ->
+ terminate(State, {internal_error, timeout, 'No message or data received before timeout.'})
+ end.
+
+parse(State=#state{socket=Socket, transport=Transport, next_settings=Settings, parse_state=preface}, Data) ->
+ case Data of
+ << "PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", Rest/bits >> ->
+			%% @todo To speed up connection establishment we could construct this frame when starting the listener.
+			%% We send next_settings and use defaults until we get an ack.
+ Transport:send(Socket, cow_http2:settings(Settings)),
+ parse(State#state{parse_state=settings}, Rest);
+ _ when byte_size(Data) >= 24 ->
+ Transport:close(Socket),
+ exit({shutdown, {connection_error, protocol_error,
+ 'The connection preface was invalid. (RFC7540 3.5)'}});
+ _ ->
+ before_loop(State, Data)
+ end;
+%% @todo Perhaps instead of just more we can have {more, Len} to avoid all the checks.
+parse(State=#state{parse_state=ParseState}, Data) ->
+ case cow_http2:parse(Data) of
+ {ok, Frame, Rest} ->
+ case ParseState of
+ normal -> parse(frame(State, Frame), Rest);
+ settings -> parse(frame(State, Frame), Rest);
+ _ -> parse(continuation_frame(State, Frame), Rest)
+ end;
+ {stream_error, StreamID, Reason, Human, Rest} ->
+ parse(stream_reset(State, StreamID, {stream_error, Reason, Human}), Rest);
+ Error = {connection_error, _, _} ->
+ terminate(State, Error);
+ more ->
+ before_loop(State, Data)
+ end.
+
+%% @todo When we get a 'fin' we need to check if the stream had a 'fin' sent back
+%% and terminate the stream if this is the end of it.
+
+%% DATA frame.
+frame(State=#state{handler=Handler, streams=Streams0}, {data, StreamID, IsFin, Data}) ->
+ case lists:keyfind(StreamID, #stream.id, Streams0) of
+ Stream = #stream{state=StreamState0} -> %% @todo in=open
+ try Handler:data(StreamID, IsFin, Data, StreamState0) of
+ {Commands, StreamState} ->
+ Streams = lists:keyreplace(StreamID, #stream.id, Streams0,
+ Stream#stream{state=StreamState}),
+ commands(State#state{streams=Streams}, StreamID, Commands)
+ catch Class:Reason ->
+ error_logger:error_msg("Exception occurred in ~s:data(~p, ~p, ~p, ~p) with reason ~p:~p.",
+ [Handler, StreamID, IsFin, Data, StreamState0, Class, Reason]),
+ stream_reset(State, StreamID, {internal_error, {Class, Reason},
+ 'Exception occurred in StreamHandler:data/4 call.'})
+ end;
+ false ->
+ stream_reset(State, StreamID, {stream_error, stream_closed,
+ 'DATA frame received for a closed or non-existent stream. (RFC7540 6.1)'})
+ end;
+%% Single HEADERS frame headers block.
+frame(State, {headers, StreamID, IsFin, head_fin, HeaderBlock}) ->
+	%% @todo We probably need to validate StreamID here and in the next 4 clauses.
+ stream_init(State, StreamID, IsFin, HeaderBlock);
+%% HEADERS frame starting a headers block. Enter continuation mode.
+frame(State, {headers, StreamID, IsFin, head_nofin, HeaderBlockFragment}) ->
+ State#state{parse_state={continuation, StreamID, IsFin, HeaderBlockFragment}};
+%% Single HEADERS frame headers block with priority.
+frame(State, {headers, StreamID, IsFin, head_fin,
+ _IsExclusive, _DepStreamID, _Weight, HeaderBlock}) ->
+ %% @todo Handle priority.
+ stream_init(State, StreamID, IsFin, HeaderBlock);
+%% HEADERS frame starting a headers block. Enter continuation mode.
+frame(State, {headers, StreamID, IsFin, head_nofin,
+ _IsExclusive, _DepStreamID, _Weight, HeaderBlockFragment}) ->
+ %% @todo Handle priority.
+ State#state{parse_state={continuation, StreamID, IsFin, HeaderBlockFragment}};
+%% PRIORITY frame.
+frame(State, {priority, _StreamID, _IsExclusive, _DepStreamID, _Weight}) ->
+ %% @todo Validate StreamID?
+ %% @todo Handle priority.
+ State;
+%% RST_STREAM frame.
+frame(State, {rst_stream, StreamID, Reason}) ->
+ stream_reset(State, StreamID, {stream_error, Reason, 'Stream reset requested by client.'});
+%% SETTINGS frame.
+frame(State, {settings, Settings}) ->
+ %% @todo Apply SETTINGS.
+ io:format("settings ~p~n", [Settings]),
+ State;
+%% Ack for a previously sent SETTINGS frame.
+frame(State=#state{next_settings=_NextSettings}, settings_ack) ->
+ %% @todo Apply SETTINGS that require synchronization.
+ State;
+%% Unexpected PUSH_PROMISE frame.
+frame(State, {push_promise, _, _, _, _}) ->
+ terminate(State, {connection_error, protocol_error,
+ 'PUSH_PROMISE frames MUST only be sent on a peer-initiated stream. (RFC7540 6.6)'});
+%% PING frame.
+frame(State=#state{socket=Socket, transport=Transport}, {ping, Opaque}) ->
+ Transport:send(Socket, cow_http2:ping_ack(Opaque)),
+ State;
+%% Ack for a previously sent PING frame.
+%%
+%% @todo Might want to check contents but probably a waste of time.
+frame(State, {ping_ack, _Opaque}) ->
+ State;
+%% GOAWAY frame.
+frame(State, Frame={goaway, _, _, _}) ->
+ terminate(State, {stop, Frame, 'Client is going away.'});
+%% Connection-wide WINDOW_UPDATE frame.
+frame(State, {window_update, _Increment}) ->
+	%% @todo flow control
+ State;
+%% Stream-specific WINDOW_UPDATE frame.
+frame(State, {window_update, _StreamID, _Increment}) ->
+	%% @todo stream-specific flow control
+ State;
+%% Unexpected CONTINUATION frame.
+frame(State, {continuation, _, _, _}) ->
+ terminate(State, {connection_error, protocol_error,
+ 'CONTINUATION frames MUST be preceded by a HEADERS frame. (RFC7540 6.10)'}).
+
+continuation_frame(State=#state{parse_state={continuation, StreamID, IsFin, HeaderBlockFragment0}},
+ {continuation, StreamID, fin, HeaderBlockFragment1}) ->
+ stream_init(State#state{parse_state=normal}, StreamID, IsFin,
+ << HeaderBlockFragment0/binary, HeaderBlockFragment1/binary >>);
+continuation_frame(State=#state{parse_state={continuation, StreamID, IsFin, HeaderBlockFragment0}},
+ {continuation, StreamID, nofin, HeaderBlockFragment1}) ->
+ State#state{parse_state={continuation, StreamID, IsFin,
+ << HeaderBlockFragment0/binary, HeaderBlockFragment1/binary >>}};
+continuation_frame(State, _) ->
+ terminate(State, {connection_error, protocol_error,
+ 'An invalid frame was received while expecting a CONTINUATION frame. (RFC7540 6.2)'}).
+
+down(State=#state{children=Children0}, Pid, Msg) ->
+ case lists:keytake(Pid, 1, Children0) of
+ {value, {_, StreamID}, Children} ->
+ info(State#state{children=Children}, StreamID, Msg);
+ false ->
+ error_logger:error_msg("Received EXIT signal ~p for unknown process ~p.", [Msg, Pid]),
+ State
+ end.
+
+info(State=#state{handler=Handler, streams=Streams0}, StreamID, Msg) ->
+ case lists:keyfind(StreamID, #stream.id, Streams0) of
+ Stream = #stream{state=StreamState0} ->
+ try Handler:info(StreamID, Msg, StreamState0) of
+ {Commands, StreamState} ->
+ Streams = lists:keyreplace(StreamID, #stream.id, Streams0,
+ Stream#stream{state=StreamState}),
+ commands(State#state{streams=Streams}, StreamID, Commands)
+ catch Class:Reason ->
+ error_logger:error_msg("Exception occurred in ~s:info(~p, ~p, ~p) with reason ~p:~p.",
+ [Handler, StreamID, Msg, StreamState0, Class, Reason]),
+ stream_reset(State, StreamID, {internal_error, {Class, Reason},
+ 'Exception occurred in StreamHandler:info/3 call.'})
+ end;
+ false ->
+ error_logger:error_msg("Received message ~p for unknown stream ~p.", [Msg, StreamID]),
+ State
+ end.
+
+commands(State, _, []) ->
+ State;
+%% Send response headers.
+%%
+%% @todo Kill the stream if it sent a response when one has already been sent.
+%% @todo Keep IsFin in the state.
+%% @todo Same two things above apply to DATA, possibly promise too.
+commands(State=#state{socket=Socket, transport=Transport, encode_state=EncodeState0}, StreamID,
+ [{response, IsFin, StatusCode, Headers0}|Tail]) ->
+ Headers = Headers0#{<<":status">> => integer_to_binary(StatusCode)},
+ {HeaderBlock, EncodeState} = headers_encode(Headers, EncodeState0),
+ Transport:send(Socket, cow_http2:headers(StreamID, IsFin, HeaderBlock)),
+ commands(State#state{encode_state=EncodeState}, StreamID, Tail);
+%% Send a response body chunk.
+%%
+%% @todo WINDOW_UPDATE stuff require us to buffer some data.
+commands(State=#state{socket=Socket, transport=Transport}, StreamID,
+ [{data, IsFin, Data}|Tail]) ->
+ Transport:send(Socket, cow_http2:data(StreamID, IsFin, Data)),
+ commands(State, StreamID, Tail);
+%% Send a push promise.
+%%
+%% @todo We need to keep track of what promises we made so that we don't
+%% end up with an infinite loop of promises.
+commands(State0=#state{socket=Socket, transport=Transport, server_streamid=PromisedStreamID,
+ encode_state=EncodeState0}, StreamID,
+ [{promise, Method, Scheme, Authority, Path, Headers0}|Tail]) ->
+ Headers = Headers0#{<<":method">> => Method,
+ <<":scheme">> => Scheme,
+ <<":authority">> => Authority,
+ <<":path">> => Path},
+ {HeaderBlock, EncodeState} = headers_encode(Headers, EncodeState0),
+ Transport:send(Socket, cow_http2:push_promise(StreamID, PromisedStreamID, HeaderBlock)),
+ %% @todo iolist_to_binary(HeaderBlock) isn't optimal. Need a shortcut.
+ State = stream_init(State0#state{server_streamid=PromisedStreamID + 2, encode_state=EncodeState},
+ PromisedStreamID, fin, iolist_to_binary(HeaderBlock)),
+ commands(State, StreamID, Tail);
+%% @todo Update the flow control state.
+commands(State, StreamID, [{flow, _Size}|Tail]) ->
+ commands(State, StreamID, Tail);
+%% Supervise a child process.
+commands(State=#state{children=Children}, StreamID, [{spawn, Pid}|Tail]) ->
+ commands(State#state{children=[{Pid, StreamID}|Children]}, StreamID, Tail);
+%% Upgrade to a new protocol.
+%%
+%% @todo Implementation.
+%% @todo Can only upgrade if: there are no other streams and there are no children left alive.
+%% @todo For HTTP/1.1 we should reject upgrading if pipelining is used.
+commands(State, StreamID, [{upgrade, _Mod, _ModState}]) ->
+ commands(State, StreamID, []);
+commands(State, StreamID, [{upgrade, _Mod, _ModState}|Tail]) ->
+ %% @todo This is an error. Not sure what to do here yet.
+ commands(State, StreamID, Tail).
+
+terminate(#state{socket=Socket, transport=Transport, handler=Handler,
+ streams=Streams, children=Children}, Reason) ->
+ %% @todo Send GOAWAY frame; need to keep track of last good stream id; how?
+ terminate_all_streams(Streams, Reason, Handler, Children),
+ Transport:close(Socket),
+ exit({shutdown, Reason}).
+
+terminate_all_streams([], _, _, []) ->
+ ok;
+terminate_all_streams([#stream{id=StreamID, state=StreamState}|Tail], Reason, Handler, Children0) ->
+ stream_call_terminate(StreamID, Reason, Handler, StreamState),
+ Children = stream_terminate_children(Children0, StreamID, []),
+ terminate_all_streams(Tail, Reason, Handler, Children).
+
+%% Stream functions.
+
+stream_init(State0=#state{socket=Socket, transport=Transport, handler=Handler,
+ streams=Streams0, decode_state=DecodeState0}, StreamID, IsFin, HeaderBlock) ->
+ %% @todo Add clause for CONNECT requests (no scheme/path).
+ try headers_decode(HeaderBlock, DecodeState0) of
+ {Headers0=#{
+ <<":method">> := Method,
+ <<":scheme">> := Scheme,
+ <<":authority">> := Authority,
+ <<":path">> := Path}, DecodeState} ->
+ State = State0#state{decode_state=DecodeState},
+ Headers = maps:without([<<":method">>, <<":scheme">>, <<":authority">>, <<":path">>], Headers0),
+ try Handler:init(StreamID, IsFin, Method, Scheme, Authority, Path, Headers) of
+ {Commands, StreamState} ->
+ Streams = [#stream{id=StreamID, state=StreamState}|Streams0],
+ commands(State#state{streams=Streams}, StreamID, Commands)
+ catch Class:Reason ->
+ error_logger:error_msg("Exception occurred in ~s:init(~p, ~p, ~p, ~p, ~p, ~p, ~p) "
+ "with reason ~p:~p.",
+ [Handler, StreamID, IsFin, Method, Scheme, Authority, Path, Headers, Class, Reason]),
+ stream_reset(State, StreamID, {internal_error, {Class, Reason},
+ 'Exception occurred in StreamHandler:init/7 call.'})
+ end;
+ {_, DecodeState} ->
+ Transport:send(Socket, cow_http2:rst_stream(StreamID, protocol_error)),
+ State0#state{decode_state=DecodeState}
+ catch _:_ ->
+ terminate(State0, {connection_error, compression_error,
+ 'Error while trying to decode HPACK-encoded header block. (RFC7540 4.3)'})
+ end.
+
+%% @todo We might need to keep track of which stream has been reset so we don't send lots of them.
+stream_reset(State=#state{socket=Socket, transport=Transport}, StreamID,
+ StreamError={internal_error, _, _}) ->
+ Transport:send(Socket, cow_http2:rst_stream(StreamID, internal_error)),
+ stream_terminate(State, StreamID, StreamError);
+stream_reset(State=#state{socket=Socket, transport=Transport}, StreamID,
+ StreamError={stream_error, Reason, _}) ->
+ Transport:send(Socket, cow_http2:rst_stream(StreamID, Reason)),
+ stream_terminate(State, StreamID, StreamError).
+
+stream_terminate(State=#state{handler=Handler, streams=Streams0, children=Children0}, StreamID, Reason) ->
+ case lists:keytake(StreamID, #stream.id, Streams0) of
+ {value, #stream{state=StreamState}, Streams} ->
+ stream_call_terminate(StreamID, Reason, Handler, StreamState),
+ Children = stream_terminate_children(Children0, StreamID, []),
+ State#state{streams=Streams, children=Children};
+ false ->
+ %% @todo Unknown stream. Not sure what to do here. Check again once all
+ %% terminate calls have been written.
+ State
+ end.
+
+stream_call_terminate(StreamID, Reason, Handler, StreamState) ->
+ try
+ Handler:terminate(StreamID, Reason, StreamState),
+ ok
+	catch Class:Reason2 ->
+		error_logger:error_msg("Exception occurred in ~s:terminate(~p, ~p, ~p) with reason ~p:~p.",
+			[Handler, StreamID, Reason, StreamState, Class, Reason2])
+ end.
+
+stream_terminate_children([], _, Acc) ->
+ Acc;
+stream_terminate_children([{Pid, StreamID}|Tail], StreamID, Acc) ->
+ exit(Pid, kill),
+ stream_terminate_children(Tail, StreamID, Acc);
+stream_terminate_children([Child|Tail], StreamID, Acc) ->
+ stream_terminate_children(Tail, StreamID, [Child|Acc]).
+
+%% Headers encode/decode.
+
+headers_decode(HeaderBlock, DecodeState0) ->
+ {Headers, DecodeState} = cow_hpack:decode(HeaderBlock, DecodeState0),
+ {maps:from_list(Headers), DecodeState}.
+
+%% @todo We will need to special-case the set-cookie header here.
+headers_encode(Headers0, EncodeState) ->
+ Headers = maps:to_list(Headers0),
+ cow_hpack:encode(Headers, EncodeState).
+
+%% System callbacks.
+
+-spec system_continue(_, _, {#state{}, binary()}) -> ok.
+system_continue(_, _, {State, Buffer}) ->
+ loop(State, Buffer).
+
+-spec system_terminate(any(), _, _, _) -> no_return().
+system_terminate(Reason, _, _, _) ->
+ exit(Reason).
+
+-spec system_code_change(Misc, _, _, _) -> {ok, Misc} when Misc::{#state{}, binary()}.
+system_code_change(Misc, _, _, _) ->
+ {ok, Misc}.
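
Messages tagged {Handler, StreamID} are routed by the loop above to the handler's info/3 callback. A child process registered through the {spawn, Pid} command could therefore notify its stream along these lines (a sketch; ConnPid, Handler and StreamID are assumed to have been handed to the child when it was spawned):

    ConnPid ! {{Handler, StreamID}, {result, ok}}.
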
diff --git a/src/cowboy_stream.erl b/src/cowboy_stream.erl
new file mode 100644
index 0000000..25ddb1a
--- /dev/null
+++ b/src/cowboy_stream.erl
@@ -0,0 +1,53 @@
+%% Copyright (c) 2015, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_stream).
+
+-type streamid() :: any().
+-type fin() :: fin | nofin.
+-type headers() :: map(). %% @todo cowboy:http_headers() when they're maps
+
+-type status_code() :: 100..999. %% @todo cowboy:http_status() when not binary
+-type state() :: any().
+
+-type commands() :: [{response, fin(), status_code(), headers()}
+ | {data, fin(), iodata()}
+ | {promise, binary(), binary(), binary(), binary(), headers()}
+ | {flow, auto | integer()}
+ | {spawn, pid()}
+ | {upgrade, module(), state()}].
+
+-type human_reason() :: atom().
+-type reason() :: {internal_error, timeout | {error | exit | throw, any()}, human_reason()}
+	| {socket_error, closed | atom(), human_reason()}
+	| {stream_error, cow_http2:error_reason(), human_reason()}
+	| {connection_error, cow_http2:error_reason(), human_reason()}
+	| {stop, cow_http2:frame(), human_reason()}.
+
+-callback init(streamid(), fin(), binary(), binary(), binary(), binary(),
+	headers()) -> {commands(), state()}.
+-callback data(streamid(), fin(), binary(), State) -> {commands(), State} when State::state().
+-callback info(streamid(), any(), state()) -> {commands(), State} when State::state().
+-callback terminate(streamid(), reason(), state()) -> any().
+
+%% @todo To optimize the number of active timers we could have a command
+%% that enables a timeout that is called in the absence of any other call,
+%% similar to what gen_server does. The nice thing about this is
+%% that the connection process can keep a single timer around (the same
+%% one that would be used to detect half-closed sockets) and use this
+%% timer and other events to trigger the timeout in streams at their
+%% intended time.
+%%
+%% This same timer can be used to try and send PING frames to help detect
+%% that the connection is indeed unresponsive.
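
To make the callback interface concrete, here is a hypothetical stream handler sketch (my_stream_h is not part of this commit) matching the init/7 call made by cowboy_http2 above; it replies immediately to every request:

    -module(my_stream_h).
    -behaviour(cowboy_stream).

    -export([init/7]).
    -export([data/4]).
    -export([info/3]).
    -export([terminate/3]).

    %% Send the response headers, then the body, and keep no state.
    init(_StreamID, _IsFin, _Method, _Scheme, _Authority, _Path, _Headers) ->
        {[{response, nofin, 200, #{<<"content-type">> => <<"text/plain">>}},
            {data, fin, <<"Hello world!">>}], undefined}.

    %% Ignore request body data.
    data(_StreamID, _IsFin, _Data, State) ->
        {[], State}.

    %% Ignore Erlang messages sent to the stream.
    info(_StreamID, _Msg, State) ->
        {[], State}.

    terminate(_StreamID, _Reason, _State) ->
        ok.
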
diff --git a/src/cowboy_tls.erl b/src/cowboy_tls.erl
new file mode 100644
index 0000000..745d502
--- /dev/null
+++ b/src/cowboy_tls.erl
@@ -0,0 +1,44 @@
+%% Copyright (c) 2015, Loïc Hoguin <[email protected]>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_tls).
+
+-export([start_link/4]).
+-export([init/5]).
+
+-spec start_link(ranch:ref(), inet:socket(), module(), cowboy:opts()) -> {ok, pid()}.
+start_link(Ref, Socket, Transport, Opts) ->
+ Pid = proc_lib:spawn_link(?MODULE, init, [self(), Ref, Socket, Transport, Opts]),
+ {ok, Pid}.
+
+-spec init(pid(), ranch:ref(), inet:socket(), module(), cowboy:opts()) -> ok.
+init(Parent, Ref, Socket, Transport, Opts) ->
+ ok = ranch:accept_ack(Ref),
+ case ssl:negotiated_protocol(Socket) of
+ {ok, <<"h2">>} ->
+ init(Parent, Ref, Socket, Transport, Opts, cowboy_http2);
+ %% @todo Implement cowboy_spdy and cowboy_http.
+ {ok, <<"spdy/3">>} ->
+ init(Parent, Ref, Socket, Transport, Opts, cowboy_spdy);
+ _ -> %% http/1.1 or no protocol negotiated.
+ init(Parent, Ref, Socket, Transport, Opts, cowboy_http)
+ end.
+
+init(Parent, Ref, Socket, Transport, Opts, Protocol) ->
+ {Handler, Type} = maps:get(stream_handler, Opts, {cowboy_stream_h, supervisor}),
+ _ = case Type of
+ worker -> ok;
+ supervisor -> process_flag(trap_exit, true)
+ end,
+ Protocol:init(Parent, Ref, Socket, Transport, Opts, Handler).