diff options
| author | Michael Klishin <mklishin@pivotal.io> | 2019-07-08 18:41:25 +0300 |
|---|---|---|
| committer | Michael Klishin <mklishin@pivotal.io> | 2019-07-08 18:41:25 +0300 |
| commit | 50ac865517c84e01a122e5a8e92a1313b7114d67 (patch) | |
| tree | cebda5964958361142a6d18d01fb1958058c6304 | |
| parent | e151ebfe2a50a5e9418e12a6712545aa7054cc40 (diff) | |
| parent | d35182b709ed91b12b9b988ba28d1dcbf063370d (diff) | |
| download | rabbitmq-server-git-50ac865517c84e01a122e5a8e92a1313b7114d67.tar.gz | |
Merge branch 'master' into management-only-api
28 files changed, 1537 insertions, 435 deletions
@@ -17,16 +17,16 @@ ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) export ERLANG_MK_FILENAME -ERLANG_MK_VERSION = 2.0.0-pre.2-311-gb20df2d +ERLANG_MK_VERSION = 2.0.0-pre.2-486-g2b7e434 ERLANG_MK_WITHOUT = # Make 3.81 and 3.82 are deprecated. -ifeq ($(MAKE_VERSION),3.81) +ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81) $(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html) endif -ifeq ($(MAKE_VERSION),3.82) +ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82) $(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html) endif @@ -47,10 +47,18 @@ verbose_0 = @ verbose_2 = set -x; verbose = $(verbose_$(V)) +ifeq ($(V),3) +SHELL := $(SHELL) -x +endif + gen_verbose_0 = @echo " GEN " $@; gen_verbose_2 = set -x; gen_verbose = $(gen_verbose_$(V)) +gen_verbose_esc_0 = @echo " GEN " $$@; +gen_verbose_esc_2 = set -x; +gen_verbose_esc = $(gen_verbose_esc_$(V)) + # Temporary files directory. ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk @@ -111,6 +119,9 @@ endif distclean:: clean distclean-tmp +$(ERLANG_MK_TMP): + $(verbose) mkdir -p $(ERLANG_MK_TMP) + distclean-tmp: $(gen_verbose) rm -rf $(ERLANG_MK_TMP) @@ -154,13 +165,17 @@ define comma_list $(subst $(space),$(comma),$(strip $(1))) endef +define escape_dquotes +$(subst ",\",$1) +endef + # Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. 
define erlang -$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk endef ifeq ($(PLATFORM),msys2) -core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +core_native_path = $(shell cygpath -m $1) else core_native_path = $1 endif @@ -169,7 +184,8 @@ core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$ core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) -core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) +# We skip files that contain spaces because they end up causing issues. +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2) | grep -v " ")) core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) @@ -178,6 +194,10 @@ core_ls = $(filter-out $(1),$(shell echo $(1))) # @todo Use a solution that does not require using perl. core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) +define core_render + printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + # Automated update. 
ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk @@ -188,15 +208,16 @@ ERLANG_MK_BUILD_DIR ?= .erlang.mk.build erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT) erlang-mk: ifdef ERLANG_MK_COMMIT - git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) - cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) + $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) + $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) else - git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) + $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) endif - if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi - $(MAKE) -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' - cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk - rm -rf $(ERLANG_MK_BUILD_DIR) + $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1 + $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR) + $(verbose) rm -rf $(ERLANG_MK_TMP) # The erlang.mk package index is bundled in the default erlang.mk build. # Search for the string "copyright" to skip to the rest of the code. @@ -212,6 +233,8 @@ ifeq ($(strip $(KERL)),) KERL := $(ERLANG_MK_TMP)/kerl/kerl endif +KERL_DIR = $(ERLANG_MK_TMP)/kerl + export KERL KERL_GIT ?= https://github.com/kerl/kerl @@ -222,24 +245,25 @@ KERL_MAKEFLAGS ?= OTP_GIT ?= https://github.com/erlang/otp define kerl_otp_target -ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(1)),) $(KERL_INSTALL_DIR)/$(1): $(KERL) - MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1) - $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1) -endif + $(verbose) if [ ! 
-d $$@ ]; then \ + MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \ + $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \ + fi endef define kerl_hipe_target -ifeq ($(wildcard $(KERL_INSTALL_DIR)/$1-native),) $(KERL_INSTALL_DIR)/$1-native: $(KERL) - KERL_CONFIGURE_OPTIONS=--enable-native-libs \ - MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native - $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native -endif + $(verbose) if [ ! -d $$@ ]; then \ + KERL_CONFIGURE_OPTIONS=--enable-native-libs \ + MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \ + $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \ + fi endef -$(KERL): - $(verbose) mkdir -p $(ERLANG_MK_TMP) +$(KERL): $(KERL_DIR) + +$(KERL_DIR): | $(ERLANG_MK_TMP) $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT) $(verbose) chmod +x $(KERL) @@ -247,10 +271,15 @@ $(KERL): distclean:: distclean-kerl distclean-kerl: - $(gen_verbose) rm -rf $(KERL) + $(gen_verbose) rm -rf $(KERL_DIR) # Allow users to select which version of Erlang/OTP to use for a project. +ifneq ($(strip $(LATEST_ERLANG_OTP)),) +ERLANG_OTP := $(notdir $(lastword $(sort $(filter-out $(KERL_INSTALL_DIR)/OTP_R%,\ + $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native])))))) +endif + ERLANG_OTP ?= ERLANG_HIPE ?= @@ -274,9 +303,9 @@ SHELL := env PATH=$(PATH) $(SHELL) $(eval $(call kerl_hipe_target,$(ERLANG_HIPE))) # Build Erlang/OTP only if it doesn't already exist. -ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE))$(BUILD_ERLANG_OTP),) +ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),) $(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...) 
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE) ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2) +$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2) endif endif @@ -301,7 +330,7 @@ pkg_active_commit = master PACKAGES += actordb_core pkg_actordb_core_name = actordb_core pkg_actordb_core_description = ActorDB main source -pkg_actordb_core_homepage = https://www.actordb.com/ +pkg_actordb_core_homepage = http://www.actordb.com/ pkg_actordb_core_fetch = git pkg_actordb_core_repo = https://github.com/biokoda/actordb_core pkg_actordb_core_commit = master @@ -309,7 +338,7 @@ pkg_actordb_core_commit = master PACKAGES += actordb_thrift pkg_actordb_thrift_name = actordb_thrift pkg_actordb_thrift_description = Thrift API for ActorDB -pkg_actordb_thrift_homepage = https://www.actordb.com/ +pkg_actordb_thrift_homepage = http://www.actordb.com/ pkg_actordb_thrift_fetch = git pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift pkg_actordb_thrift_commit = master @@ -357,7 +386,7 @@ pkg_antidote_commit = master PACKAGES += apns pkg_apns_name = apns pkg_apns_description = Apple Push Notification Server for Erlang -pkg_apns_homepage = https://inaka.github.com/apns4erl +pkg_apns_homepage = http://inaka.github.com/apns4erl pkg_apns_fetch = git pkg_apns_repo = https://github.com/inaka/apns4erl pkg_apns_commit = master @@ -405,9 +434,9 @@ pkg_basho_bench_commit = master PACKAGES += bcrypt pkg_bcrypt_name = bcrypt pkg_bcrypt_description = Bcrypt Erlang / C library -pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt pkg_bcrypt_fetch = git -pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git pkg_bcrypt_commit = master PACKAGES += beam @@ -525,7 +554,7 @@ pkg_bson_commit = master PACKAGES += bullet pkg_bullet_name = bullet pkg_bullet_description = Simple, reliable, efficient 
streaming for Cowboy. -pkg_bullet_homepage = https://ninenines.eu +pkg_bullet_homepage = http://ninenines.eu pkg_bullet_fetch = git pkg_bullet_repo = https://github.com/ninenines/bullet pkg_bullet_commit = master @@ -596,7 +625,7 @@ pkg_check_node_commit = master PACKAGES += chronos pkg_chronos_name = chronos -pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests. +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. pkg_chronos_homepage = https://github.com/lehoff/chronos pkg_chronos_fetch = git pkg_chronos_repo = https://github.com/lehoff/chronos @@ -629,7 +658,7 @@ pkg_clique_commit = develop PACKAGES += cloudi_core pkg_cloudi_core_name = cloudi_core pkg_cloudi_core_description = CloudI internal service runtime -pkg_cloudi_core_homepage = https://cloudi.org/ +pkg_cloudi_core_homepage = http://cloudi.org/ pkg_cloudi_core_fetch = git pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core pkg_cloudi_core_commit = master @@ -637,7 +666,7 @@ pkg_cloudi_core_commit = master PACKAGES += cloudi_service_api_requests pkg_cloudi_service_api_requests_name = cloudi_service_api_requests pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) -pkg_cloudi_service_api_requests_homepage = https://cloudi.org/ +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ pkg_cloudi_service_api_requests_fetch = git pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests pkg_cloudi_service_api_requests_commit = master @@ -645,7 +674,7 @@ pkg_cloudi_service_api_requests_commit = master PACKAGES += cloudi_service_db pkg_cloudi_service_db_name = cloudi_service_db pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) -pkg_cloudi_service_db_homepage = https://cloudi.org/ +pkg_cloudi_service_db_homepage = http://cloudi.org/ pkg_cloudi_service_db_fetch = git 
pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db pkg_cloudi_service_db_commit = master @@ -653,7 +682,7 @@ pkg_cloudi_service_db_commit = master PACKAGES += cloudi_service_db_cassandra pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service -pkg_cloudi_service_db_cassandra_homepage = https://cloudi.org/ +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ pkg_cloudi_service_db_cassandra_fetch = git pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra pkg_cloudi_service_db_cassandra_commit = master @@ -661,7 +690,7 @@ pkg_cloudi_service_db_cassandra_commit = master PACKAGES += cloudi_service_db_cassandra_cql pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service -pkg_cloudi_service_db_cassandra_cql_homepage = https://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ pkg_cloudi_service_db_cassandra_cql_fetch = git pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql pkg_cloudi_service_db_cassandra_cql_commit = master @@ -669,7 +698,7 @@ pkg_cloudi_service_db_cassandra_cql_commit = master PACKAGES += cloudi_service_db_couchdb pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service -pkg_cloudi_service_db_couchdb_homepage = https://cloudi.org/ +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ pkg_cloudi_service_db_couchdb_fetch = git pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb pkg_cloudi_service_db_couchdb_commit = master @@ -677,7 +706,7 @@ pkg_cloudi_service_db_couchdb_commit = master PACKAGES += cloudi_service_db_elasticsearch pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch 
pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service -pkg_cloudi_service_db_elasticsearch_homepage = https://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ pkg_cloudi_service_db_elasticsearch_fetch = git pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch pkg_cloudi_service_db_elasticsearch_commit = master @@ -685,7 +714,7 @@ pkg_cloudi_service_db_elasticsearch_commit = master PACKAGES += cloudi_service_db_memcached pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached pkg_cloudi_service_db_memcached_description = memcached CloudI Service -pkg_cloudi_service_db_memcached_homepage = https://cloudi.org/ +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ pkg_cloudi_service_db_memcached_fetch = git pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached pkg_cloudi_service_db_memcached_commit = master @@ -693,7 +722,7 @@ pkg_cloudi_service_db_memcached_commit = master PACKAGES += cloudi_service_db_mysql pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql pkg_cloudi_service_db_mysql_description = MySQL CloudI Service -pkg_cloudi_service_db_mysql_homepage = https://cloudi.org/ +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ pkg_cloudi_service_db_mysql_fetch = git pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql pkg_cloudi_service_db_mysql_commit = master @@ -701,7 +730,7 @@ pkg_cloudi_service_db_mysql_commit = master PACKAGES += cloudi_service_db_pgsql pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service -pkg_cloudi_service_db_pgsql_homepage = https://cloudi.org/ +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ pkg_cloudi_service_db_pgsql_fetch = git pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql pkg_cloudi_service_db_pgsql_commit = 
master @@ -709,7 +738,7 @@ pkg_cloudi_service_db_pgsql_commit = master PACKAGES += cloudi_service_db_riak pkg_cloudi_service_db_riak_name = cloudi_service_db_riak pkg_cloudi_service_db_riak_description = Riak CloudI Service -pkg_cloudi_service_db_riak_homepage = https://cloudi.org/ +pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ pkg_cloudi_service_db_riak_fetch = git pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak pkg_cloudi_service_db_riak_commit = master @@ -717,7 +746,7 @@ pkg_cloudi_service_db_riak_commit = master PACKAGES += cloudi_service_db_tokyotyrant pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service -pkg_cloudi_service_db_tokyotyrant_homepage = https://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ pkg_cloudi_service_db_tokyotyrant_fetch = git pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant pkg_cloudi_service_db_tokyotyrant_commit = master @@ -725,7 +754,7 @@ pkg_cloudi_service_db_tokyotyrant_commit = master PACKAGES += cloudi_service_filesystem pkg_cloudi_service_filesystem_name = cloudi_service_filesystem pkg_cloudi_service_filesystem_description = Filesystem CloudI Service -pkg_cloudi_service_filesystem_homepage = https://cloudi.org/ +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ pkg_cloudi_service_filesystem_fetch = git pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem pkg_cloudi_service_filesystem_commit = master @@ -733,7 +762,7 @@ pkg_cloudi_service_filesystem_commit = master PACKAGES += cloudi_service_http_client pkg_cloudi_service_http_client_name = cloudi_service_http_client pkg_cloudi_service_http_client_description = HTTP client CloudI Service -pkg_cloudi_service_http_client_homepage = https://cloudi.org/ +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ 
pkg_cloudi_service_http_client_fetch = git pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client pkg_cloudi_service_http_client_commit = master @@ -741,7 +770,7 @@ pkg_cloudi_service_http_client_commit = master PACKAGES += cloudi_service_http_cowboy pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service -pkg_cloudi_service_http_cowboy_homepage = https://cloudi.org/ +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ pkg_cloudi_service_http_cowboy_fetch = git pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy pkg_cloudi_service_http_cowboy_commit = master @@ -749,7 +778,7 @@ pkg_cloudi_service_http_cowboy_commit = master PACKAGES += cloudi_service_http_elli pkg_cloudi_service_http_elli_name = cloudi_service_http_elli pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service -pkg_cloudi_service_http_elli_homepage = https://cloudi.org/ +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ pkg_cloudi_service_http_elli_fetch = git pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli pkg_cloudi_service_http_elli_commit = master @@ -757,7 +786,7 @@ pkg_cloudi_service_http_elli_commit = master PACKAGES += cloudi_service_map_reduce pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service -pkg_cloudi_service_map_reduce_homepage = https://cloudi.org/ +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ pkg_cloudi_service_map_reduce_fetch = git pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce pkg_cloudi_service_map_reduce_commit = master @@ -765,7 +794,7 @@ pkg_cloudi_service_map_reduce_commit = master PACKAGES += cloudi_service_oauth1 pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 pkg_cloudi_service_oauth1_description = OAuth 
v1.0 CloudI Service -pkg_cloudi_service_oauth1_homepage = https://cloudi.org/ +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ pkg_cloudi_service_oauth1_fetch = git pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 pkg_cloudi_service_oauth1_commit = master @@ -773,7 +802,7 @@ pkg_cloudi_service_oauth1_commit = master PACKAGES += cloudi_service_queue pkg_cloudi_service_queue_name = cloudi_service_queue pkg_cloudi_service_queue_description = Persistent Queue Service -pkg_cloudi_service_queue_homepage = https://cloudi.org/ +pkg_cloudi_service_queue_homepage = http://cloudi.org/ pkg_cloudi_service_queue_fetch = git pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue pkg_cloudi_service_queue_commit = master @@ -781,7 +810,7 @@ pkg_cloudi_service_queue_commit = master PACKAGES += cloudi_service_quorum pkg_cloudi_service_quorum_name = cloudi_service_quorum pkg_cloudi_service_quorum_description = CloudI Quorum Service -pkg_cloudi_service_quorum_homepage = https://cloudi.org/ +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ pkg_cloudi_service_quorum_fetch = git pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum pkg_cloudi_service_quorum_commit = master @@ -789,7 +818,7 @@ pkg_cloudi_service_quorum_commit = master PACKAGES += cloudi_service_router pkg_cloudi_service_router_name = cloudi_service_router pkg_cloudi_service_router_description = CloudI Router Service -pkg_cloudi_service_router_homepage = https://cloudi.org/ +pkg_cloudi_service_router_homepage = http://cloudi.org/ pkg_cloudi_service_router_fetch = git pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router pkg_cloudi_service_router_commit = master @@ -797,7 +826,7 @@ pkg_cloudi_service_router_commit = master PACKAGES += cloudi_service_tcp pkg_cloudi_service_tcp_name = cloudi_service_tcp pkg_cloudi_service_tcp_description = TCP CloudI Service -pkg_cloudi_service_tcp_homepage = 
https://cloudi.org/ +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ pkg_cloudi_service_tcp_fetch = git pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp pkg_cloudi_service_tcp_commit = master @@ -805,7 +834,7 @@ pkg_cloudi_service_tcp_commit = master PACKAGES += cloudi_service_timers pkg_cloudi_service_timers_name = cloudi_service_timers pkg_cloudi_service_timers_description = Timers CloudI Service -pkg_cloudi_service_timers_homepage = https://cloudi.org/ +pkg_cloudi_service_timers_homepage = http://cloudi.org/ pkg_cloudi_service_timers_fetch = git pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers pkg_cloudi_service_timers_commit = master @@ -813,7 +842,7 @@ pkg_cloudi_service_timers_commit = master PACKAGES += cloudi_service_udp pkg_cloudi_service_udp_name = cloudi_service_udp pkg_cloudi_service_udp_description = UDP CloudI Service -pkg_cloudi_service_udp_homepage = https://cloudi.org/ +pkg_cloudi_service_udp_homepage = http://cloudi.org/ pkg_cloudi_service_udp_fetch = git pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp pkg_cloudi_service_udp_commit = master @@ -821,7 +850,7 @@ pkg_cloudi_service_udp_commit = master PACKAGES += cloudi_service_validate pkg_cloudi_service_validate_name = cloudi_service_validate pkg_cloudi_service_validate_description = CloudI Validate Service -pkg_cloudi_service_validate_homepage = https://cloudi.org/ +pkg_cloudi_service_validate_homepage = http://cloudi.org/ pkg_cloudi_service_validate_fetch = git pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate pkg_cloudi_service_validate_commit = master @@ -829,7 +858,7 @@ pkg_cloudi_service_validate_commit = master PACKAGES += cloudi_service_zeromq pkg_cloudi_service_zeromq_name = cloudi_service_zeromq pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service -pkg_cloudi_service_zeromq_homepage = https://cloudi.org/ +pkg_cloudi_service_zeromq_homepage = 
http://cloudi.org/ pkg_cloudi_service_zeromq_fetch = git pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq pkg_cloudi_service_zeromq_commit = master @@ -877,7 +906,7 @@ pkg_covertool_commit = master PACKAGES += cowboy pkg_cowboy_name = cowboy pkg_cowboy_description = Small, fast and modular HTTP server. -pkg_cowboy_homepage = https://ninenines.eu +pkg_cowboy_homepage = http://ninenines.eu pkg_cowboy_fetch = git pkg_cowboy_repo = https://github.com/ninenines/cowboy pkg_cowboy_commit = 1.0.4 @@ -893,7 +922,7 @@ pkg_cowdb_commit = master PACKAGES += cowlib pkg_cowlib_name = cowlib pkg_cowlib_description = Support library for manipulating Web protocols. -pkg_cowlib_homepage = https://ninenines.eu +pkg_cowlib_homepage = http://ninenines.eu pkg_cowlib_fetch = git pkg_cowlib_repo = https://github.com/ninenines/cowlib pkg_cowlib_commit = 1.0.2 @@ -1085,7 +1114,7 @@ pkg_edgar_commit = master PACKAGES += edis pkg_edis_name = edis pkg_edis_description = An Erlang implementation of Redis KV Store -pkg_edis_homepage = https://inaka.github.com/edis/ +pkg_edis_homepage = http://inaka.github.com/edis/ pkg_edis_fetch = git pkg_edis_repo = https://github.com/inaka/edis pkg_edis_commit = master @@ -1194,6 +1223,14 @@ pkg_eleveldb_fetch = git pkg_eleveldb_repo = https://github.com/basho/eleveldb pkg_eleveldb_commit = master +PACKAGES += elixir +pkg_elixir_name = elixir +pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications +pkg_elixir_homepage = https://elixir-lang.org/ +pkg_elixir_fetch = git +pkg_elixir_repo = https://github.com/elixir-lang/elixir +pkg_elixir_commit = master + PACKAGES += elli pkg_elli_name = elli pkg_elli_description = Simple, robust and performant Erlang web server @@ -1340,7 +1377,7 @@ pkg_erlang_cep_commit = master PACKAGES += erlang_js pkg_erlang_js_name = erlang_js -pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey 
JavaScript runtime. +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. pkg_erlang_js_homepage = https://github.com/basho/erlang_js pkg_erlang_js_fetch = git pkg_erlang_js_repo = https://github.com/basho/erlang_js @@ -1413,7 +1450,7 @@ pkg_erlcron_commit = master PACKAGES += erldb pkg_erldb_name = erldb pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang -pkg_erldb_homepage = https://erldb.org +pkg_erldb_homepage = http://erldb.org pkg_erldb_fetch = git pkg_erldb_repo = https://github.com/erldb/erldb pkg_erldb_commit = master @@ -1677,7 +1714,7 @@ pkg_evum_commit = master PACKAGES += exec pkg_exec_name = erlexec pkg_exec_description = Execute and control OS processes from Erlang/OTP. -pkg_exec_homepage = https://saleyn.github.com/erlexec +pkg_exec_homepage = http://saleyn.github.com/erlexec pkg_exec_fetch = git pkg_exec_repo = https://github.com/saleyn/erlexec pkg_exec_commit = master @@ -1772,7 +1809,7 @@ pkg_find_crate_commit = master PACKAGES += fix pkg_fix_name = fix -pkg_fix_description = https://fixtrading.org implementation. +pkg_fix_description = http://fixprotocol.org/ implementation. 
pkg_fix_homepage = https://github.com/maxlapshin/fix pkg_fix_fetch = git pkg_fix_repo = https://github.com/maxlapshin/fix @@ -1890,6 +1927,14 @@ pkg_gen_icmp_fetch = git pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp pkg_gen_icmp_commit = master +PACKAGES += gen_leader +pkg_gen_leader_name = gen_leader +pkg_gen_leader_description = leader election behavior +pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival +pkg_gen_leader_fetch = git +pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival +pkg_gen_leader_commit = master + PACKAGES += gen_nb_server pkg_gen_nb_server_name = gen_nb_server pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers @@ -1906,6 +1951,14 @@ pkg_gen_paxos_fetch = git pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos pkg_gen_paxos_commit = master +PACKAGES += gen_rpc +pkg_gen_rpc_name = gen_rpc +pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages +pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git +pkg_gen_rpc_fetch = git +pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git +pkg_gen_rpc_commit = master + PACKAGES += gen_smtp pkg_gen_smtp_name = gen_smtp pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules @@ -2421,7 +2474,7 @@ pkg_lambdapad_commit = master PACKAGES += lasp pkg_lasp_name = lasp pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations -pkg_lasp_homepage = https://lasp-lang.org/ +pkg_lasp_homepage = http://lasp-lang.org/ pkg_lasp_fetch = git pkg_lasp_repo = https://github.com/lasp-lang/lasp pkg_lasp_commit = master @@ -2469,7 +2522,7 @@ pkg_ling_commit = master PACKAGES += live pkg_live_name = live pkg_live_description = Automated module and configuration reloader. 
-pkg_live_homepage = https://ninenines.eu +pkg_live_homepage = http://ninenines.eu pkg_live_fetch = git pkg_live_repo = https://github.com/ninenines/live pkg_live_commit = master @@ -2885,7 +2938,7 @@ pkg_oauth2_commit = master PACKAGES += observer_cli pkg_observer_cli_name = observer_cli pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line -pkg_observer_cli_homepage = https://zhongwencool.github.io/observer_cli +pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli pkg_observer_cli_fetch = git pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli pkg_observer_cli_commit = master @@ -2938,6 +2991,14 @@ pkg_openpoker_fetch = git pkg_openpoker_repo = https://github.com/hpyhacking/openpoker pkg_openpoker_commit = master +PACKAGES += otpbp +pkg_otpbp_name = otpbp +pkg_otpbp_description = Parse transformer for use new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19) +pkg_otpbp_homepage = https://github.com/Ledest/otpbp +pkg_otpbp_fetch = git +pkg_otpbp_repo = https://github.com/Ledest/otpbp +pkg_otpbp_commit = master + PACKAGES += pal pkg_pal_name = pal pkg_pal_description = Pragmatic Authentication Library @@ -2978,6 +3039,14 @@ pkg_percept2_fetch = git pkg_percept2_repo = https://github.com/huiqing/percept2 pkg_percept2_commit = master +PACKAGES += pgo +pkg_pgo_name = pgo +pkg_pgo_description = Erlang Postgres client and connection pool +pkg_pgo_homepage = https://github.com/erleans/pgo.git +pkg_pgo_fetch = git +pkg_pgo_repo = https://github.com/erleans/pgo.git +pkg_pgo_commit = master + PACKAGES += pgsql pkg_pgsql_name = pgsql pkg_pgsql_description = Erlang PostgreSQL driver @@ -3205,7 +3274,7 @@ pkg_rafter_commit = master PACKAGES += ranch pkg_ranch_name = ranch pkg_ranch_description = Socket acceptor pool for TCP protocols. 
-pkg_ranch_homepage = https://ninenines.eu +pkg_ranch_homepage = http://ninenines.eu pkg_ranch_fetch = git pkg_ranch_repo = https://github.com/ninenines/ranch pkg_ranch_commit = 1.2.1 @@ -3221,7 +3290,7 @@ pkg_rbeacon_commit = master PACKAGES += rebar pkg_rebar_name = rebar pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. -pkg_rebar_homepage = https://www.rebar3.org +pkg_rebar_homepage = http://www.rebar3.org pkg_rebar_fetch = git pkg_rebar_repo = https://github.com/rebar/rebar3 pkg_rebar_commit = master @@ -3301,7 +3370,7 @@ pkg_relx_commit = master PACKAGES += resource_discovery pkg_resource_discovery_name = resource_discovery pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. -pkg_resource_discovery_homepage = https://erlware.org/ +pkg_resource_discovery_homepage = http://erlware.org/ pkg_resource_discovery_fetch = git pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery pkg_resource_discovery_commit = master @@ -3469,7 +3538,7 @@ pkg_seestar_commit = master PACKAGES += service pkg_service_name = service pkg_service_description = A minimal Erlang behavior for creating CloudI internal services -pkg_service_homepage = https://cloudi.org/ +pkg_service_homepage = http://cloudi.org/ pkg_service_fetch = git pkg_service_repo = https://github.com/CloudI/service pkg_service_commit = master @@ -3509,7 +3578,7 @@ pkg_sgte_commit = master PACKAGES += sheriff pkg_sheriff_name = sheriff pkg_sheriff_description = Parse transform for type based validation. 
-pkg_sheriff_homepage = https://ninenines.eu +pkg_sheriff_homepage = http://ninenines.eu pkg_sheriff_fetch = git pkg_sheriff_repo = https://github.com/extend/sheriff pkg_sheriff_commit = master @@ -3861,7 +3930,7 @@ pkg_traffic_tools_commit = master PACKAGES += trails pkg_trails_name = trails pkg_trails_description = A couple of improvements over Cowboy Routes -pkg_trails_homepage = https://inaka.github.io/cowboy-trails/ +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ pkg_trails_fetch = git pkg_trails_repo = https://github.com/inaka/cowboy-trails pkg_trails_commit = master @@ -3893,9 +3962,9 @@ pkg_trie_commit = master PACKAGES += triq pkg_triq_name = triq pkg_triq_description = Trifork QuickCheck -pkg_triq_homepage = https://github.com/triqng/triq +pkg_triq_homepage = https://triq.gitlab.io pkg_triq_fetch = git -pkg_triq_repo = https://github.com/triqng/triq.git +pkg_triq_repo = https://gitlab.com/triq/triq.git pkg_triq_commit = master PACKAGES += tunctl @@ -4077,7 +4146,7 @@ pkg_worker_pool_commit = master PACKAGES += wrangler pkg_wrangler_name = wrangler pkg_wrangler_description = Import of the Wrangler svn repository. 
-pkg_wrangler_homepage = https://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html pkg_wrangler_fetch = git pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler pkg_wrangler_commit = master @@ -4132,7 +4201,7 @@ pkg_yaws_commit = master PACKAGES += zab_engine pkg_zab_engine_name = zab_engine -pkg_zab_engine_description = zab protocol implement by erlang +pkg_zab_engine_description = zab propotocol implement by erlang pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine pkg_zab_engine_fetch = git pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine @@ -4236,6 +4305,9 @@ export DEPS_DIR REBAR_DEPS_DIR = $(DEPS_DIR) export REBAR_DEPS_DIR +REBAR_GIT ?= https://github.com/rebar/rebar +REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01 + # External "early" plugins (see core/plugins.mk for regular plugins). # They both use the core_dep_plugin macro. @@ -4259,12 +4331,20 @@ $(foreach p,$(DEP_EARLY_PLUGINS),\ dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) -dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit))) LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a))) -ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) +# When we are calling an app directly we don't want to include it here +# otherwise it'll be treated both as an apps 
and a top-level project. +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ifdef ROOT_DIR +ifndef IS_APP +ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS)) +endif +endif + ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) ifeq ($(ERL_LIBS),) ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) @@ -4278,54 +4358,86 @@ export NO_AUTOPATCH # Verbosity. -dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))"; dep_verbose_2 = set -x; dep_verbose = $(dep_verbose_$(V)) -# Core targets. +# Optimization: don't recompile deps unless truly necessary. -apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log -ifeq ($(IS_APP)$(IS_DEP),) - $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log +ifndef IS_DEP +ifneq ($(MAKELEVEL),0) +$(shell rm -f ebin/dep_built) endif - $(verbose) mkdir -p $(ERLANG_MK_TMP) +endif + +# Core targets. + +ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS)) + +apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP) # Create ebin directory for all apps to make sure Erlang recognizes them # as proper OTP applications when using -include_lib. This is a temporary # fix, a proper fix would be to compile apps/* in the right order. ifndef IS_APP +ifneq ($(ALL_APPS_DIRS),) $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \ mkdir -p $$dep/ebin; \ done endif -# at the toplevel: if LOCAL_DEPS is defined with at least one local app, only -# compile that list of apps. otherwise, compile everything. -# within an app: compile all LOCAL_DEPS that are (uncompiled) local apps - $(verbose) set -e; for dep in $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS)) ; do \ +endif +# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only +# compile that list of apps. Otherwise, compile everything. +# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps. 
+ifneq ($(ALL_APPS_DIRS_TO_BUILD),) + $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \ if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \ :; \ else \ echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \ - $(MAKE) -C $$dep IS_APP=1; \ + $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \ fi \ done +endif clean-tmp-deps.log: ifeq ($(IS_APP)$(IS_DEP),) - $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log + $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log +endif + +# Erlang.mk does not rebuild dependencies after they were compiled +# once. If a developer is working on the top-level project and some +# dependencies at the same time, he may want to change this behavior. +# There are two solutions: +# 1. Set `FULL=1` so that all dependencies are visited and +# recursively recompiled if necessary. +# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that +# should be recompiled (instead of the whole set). + +FORCE_REBUILD ?= + +ifeq ($(origin FULL),undefined) +ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),) +define force_rebuild_dep +echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")" +endef +endif endif ifneq ($(SKIP_DEPS),) deps:: else -deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log - $(verbose) mkdir -p $(ERLANG_MK_TMP) - $(verbose) set -e; for dep in $(ALL_DEPS_DIRS) ; do \ +deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP) +ifneq ($(ALL_DEPS_DIRS),) + $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \ if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ :; \ else \ echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ - if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \ + :; \ + elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ $(MAKE) -C $$dep IS_DEP=1; \ + if [ ! 
-L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \ else \ echo "Error: No Makefile to build dependency $$dep." >&2; \ exit 2; \ @@ -4333,6 +4445,7 @@ deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log fi \ done endif +endif # Deps related targets. @@ -4405,13 +4518,23 @@ define dep_autopatch_gen "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile endef +# We use flock/lockf when available to avoid concurrency issues. define dep_autopatch_fetch_rebar - mkdir -p $(ERLANG_MK_TMP); \ + if command -v flock >/dev/null; then \ + flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \ + elif command -v lockf >/dev/null; then \ + lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \ + else \ + $(call dep_autopatch_fetch_rebar2); \ + fi +endef + +define dep_autopatch_fetch_rebar2 if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \ - git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \ cd $(ERLANG_MK_TMP)/rebar; \ - git checkout -q 576e12171ab8d69b048b827b92aa65d067deea01; \ - $(MAKE); \ + git checkout -q $(REBAR_COMMIT); \ + ./bootstrap; \ cd -; \ fi endef @@ -4505,13 +4628,27 @@ define dep_autopatch_rebar.erl false end end, + SemVsn = fun + ("~>" ++ S0) -> + S = case S0 of + " " ++ S1 -> S1; + _ -> S0 + end, + case length([ok || $$. <- S]) of + 0 -> S ++ ".0.0"; + 1 -> S ++ ".0"; + _ -> S + end; + (S) -> S + end, fun() -> File = case lists:keyfind(deps, 1, Conf) of false -> []; {_, Deps} -> [begin case case Dep of N when is_atom(N) -> GetHexVsn(N); - {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_atom(N), is_list(S) -> {N, {hex, SemVsn(S)}}; + {_, S, {pkg, N}} -> {N, {hex, S}}; {N, S} when is_tuple(S) -> {N, S}; {N, _, S} -> {N, S}; {N, _, S, _} -> {N, S}; @@ -4537,6 +4674,8 @@ define dep_autopatch_rebar.erl {_, Files} -> Names = [[" ", case lists:reverse(F) of "lre." ++ Elif -> lists:reverse(Elif); + "lrx." 
++ Elif -> lists:reverse(Elif); + "lry." ++ Elif -> lists:reverse(Elif); Elif -> lists:reverse(Elif) end] || "src/" ++ F <- Files], Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) @@ -4547,7 +4686,8 @@ define dep_autopatch_rebar.erl Write("\npre-deps::\n"), Write("\npre-app::\n"), PatchHook = fun(Cmd) -> - case Cmd of + Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]), + case Cmd2 of "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); @@ -4573,9 +4713,10 @@ define dep_autopatch_rebar.erl end || H <- Hooks] end end(), - ShellToMk = fun(V) -> - re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), - "-Werror\\\\b", "", [{return, list}, global]) + ShellToMk = fun(V0) -> + V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]), + V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]), + re:replace(V, "-Werror\\\\b", "", [{return, list}, global]) end, PortSpecs = fun() -> case lists:keyfind(port_specs, 1, Conf) of @@ -4608,7 +4749,7 @@ define dep_autopatch_rebar.erl case PortSpecs of [] -> ok; _ -> - Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"), PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n", [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lerl_interface -lei\n", @@ -4645,7 +4786,7 @@ define dep_autopatch_rebar.erl darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; _ -> "" end, - "\n\nall:: ", Output, "\n\n", + "\n\nall:: ", Output, "\n\t@:\n\n", "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< 
$$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", @@ -4662,6 +4803,17 @@ define dep_autopatch_rebar.erl end, [PortSpec(S) || S <- PortSpecs] end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins0} -> + Plugins = [P || P <- Plugins0, is_tuple(P)], + case lists:keyfind('lfe-compile', 1, Plugins) of + false -> ok; + _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n") + end + end + end(), Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"), RunPlugin = fun(Plugin, Step) -> case erlang:function_exported(Plugin, Step, 2) of @@ -4676,7 +4828,8 @@ define dep_autopatch_rebar.erl fun() -> case lists:keyfind(plugins, 1, Conf) of false -> ok; - {_, Plugins} -> + {_, Plugins0} -> + Plugins = [P || P <- Plugins0, is_atom(P)], [begin case lists:keyfind(deps, 1, Conf) of false -> ok; @@ -4734,7 +4887,7 @@ define dep_autopatch_appsrc.erl {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), L1 = lists:keystore(modules, 1, L0, {modules, []}), L2 = case lists:keyfind(vsn, 1, L1) of - {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); + {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))}); {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"}); _ -> L1 end, @@ -4750,6 +4903,16 @@ define dep_fetch_git cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); endef +define dep_fetch_git-subfolder + mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \ + git clone -q -n -- $(call dep_repo,$1) \ + $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \ + cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \ + && git checkout -q $(call dep_commit,$1); \ + ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 
4,$(dep_$(1))) \ + $(DEPS_DIR)/$(call dep_name,$1); +endef + define dep_fetch_git-submodule git submodule update --init -- $(DEPS_DIR)/$1; endef @@ -4802,9 +4965,9 @@ define dep_fetch endef define dep_target -$(DEPS_DIR)/$(call dep_name,$1): +$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP) $(eval DEP_NAME := $(call dep_name,$1)) - $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \ exit 17; \ @@ -4813,7 +4976,7 @@ $(DEPS_DIR)/$(call dep_name,$1): $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1)) $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \ && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \ - echo " AUTO " $(1); \ + echo " AUTO " $(DEP_STR); \ cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \ fi - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ @@ -4821,6 +4984,12 @@ $(DEPS_DIR)/$(call dep_name,$1): cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ fi ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME) +endif + +.PHONY: autopatch-$(call dep_name,$1) + +autopatch-$(call dep_name,$1):: $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ echo " PATCH Downloading rabbitmq-codegen"; \ @@ -4836,10 +5005,11 @@ ifeq ($(filter $(1),$(NO_AUTOPATCH)),) echo " PATCH Downloading rabbitmq-codegen"; \ git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ fi \ + elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \ + ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \ else \ - $$(call dep_autopatch,$(DEP_NAME)) \ + $$(call dep_autopatch,$(call dep_name,$1)) \ fi -endif endef $(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) @@ -4876,37 +5046,6 @@ ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log -# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> -# This file is part of erlang.mk and subject to the terms of the ISC License. - -# Verbosity. - -proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); -proto_verbose = $(proto_verbose_$(V)) - -# Core targets. - -define compile_proto - $(verbose) mkdir -p ebin/ include/ - $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) - $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl - $(verbose) rm ebin/*.erl -endef - -define compile_proto.erl - [begin - protobuffs_compile:generate_source(F, - [{output_include_dir, "./include"}, - {output_src_dir, "./ebin"}]) - end || F <- string:tokens("$(1)", " ")], - halt(). -endef - -ifneq ($(wildcard src/),) -ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) - $(if $(strip $?),$(call compile_proto,$?)) -endif - # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -4962,13 +5101,9 @@ ifneq ($(wildcard src/),) # Targets. 
-ifeq ($(wildcard ebin/test),) -app:: deps $(PROJECT).d - $(verbose) $(MAKE) --no-print-directory app-build -else -app:: clean deps $(PROJECT).d +app:: $(if $(wildcard ebin/test),clean) deps + $(verbose) $(MAKE) --no-print-directory $(PROJECT).d $(verbose) $(MAKE) --no-print-directory app-build -endif ifeq ($(wildcard src/$(PROJECT_MOD).erl),) define app_file @@ -5017,7 +5152,7 @@ define compile_asn1 $(verbose) mkdir -p include/ $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1) $(verbose) mv asn1/*.erl src/ - $(verbose) mv asn1/*.hrl include/ + -$(verbose) mv asn1/*.hrl include/ $(verbose) mv asn1/*.asn1db include/ endef @@ -5055,6 +5190,14 @@ define makedep.erl E = ets:new(makedep, [bag]), G = digraph:new([acyclic]), ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + DepsDir = "$(call core_native_path,$(DEPS_DIR))", + AppsDir = "$(call core_native_path,$(APPS_DIR))", + DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))", + DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))", + AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))", + AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))", + DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")), + AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")), Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles], Add = fun (Mod, Dep) -> case lists:keyfind(Dep, 1, Modules) of @@ -5069,33 +5212,50 @@ define makedep.erl end, AddHd = fun (F, Mod, DepFile) -> case file:open(DepFile, [read]) of - {error, enoent} -> ok; + {error, enoent} -> + ok; {ok, Fd} -> - F(F, Fd, Mod), {_, ModFile} = lists:keyfind(Mod, 1, Modules), - ets:insert(E, {ModFile, DepFile}) + case ets:match(E, {ModFile, DepFile}) of + [] -> + 
ets:insert(E, {ModFile, DepFile}), + F(F, Fd, Mod,0); + _ -> ok + end end end, + SearchHrl = fun + F(_Hrl, []) -> {error,enoent}; + F(Hrl, [Dir|Dirs]) -> + HrlF = filename:join([Dir,Hrl]), + case filelib:is_file(HrlF) of + true -> + {ok, HrlF}; + false -> F(Hrl,Dirs) + end + end, Attr = fun - (F, Mod, behavior, Dep) -> Add(Mod, Dep); - (F, Mod, behaviour, Dep) -> Add(Mod, Dep); - (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep); - (F, Mod, compile, Opts) when is_list(Opts) -> + (_F, Mod, behavior, Dep) -> + Add(Mod, Dep); + (_F, Mod, behaviour, Dep) -> + Add(Mod, Dep); + (_F, Mod, compile, {parse_transform, Dep}) -> + Add(Mod, Dep); + (_F, Mod, compile, Opts) when is_list(Opts) -> case proplists:get_value(parse_transform, Opts) of undefined -> ok; Dep -> Add(Mod, Dep) end; (F, Mod, include, Hrl) -> - case filelib:is_file("include/" ++ Hrl) of - true -> AddHd(F, Mod, "include/" ++ Hrl); - false -> - case filelib:is_file("src/" ++ Hrl) of - true -> AddHd(F, Mod, "src/" ++ Hrl); - false -> false - end + case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of + {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl); + {error, _} -> false + end; + (F, Mod, include_lib, Hrl) -> + case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of + {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl); + {error, _} -> false end; - (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl); - (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl); (F, Mod, import, {Imp, _}) -> IsFile = case lists:keyfind(Imp, 1, Modules) of @@ -5108,21 +5268,31 @@ define makedep.erl end; (_, _, _, _) -> ok end, - MakeDepend = fun(F, Fd, Mod) -> - case io:parse_erl_form(Fd, undefined) of - {ok, {attribute, _, Key, Value}, _} -> - Attr(F, Mod, Key, Value), - F(F, Fd, Mod); - {eof, _} -> - file:close(Fd); - _ -> - F(F, Fd, Mod) - end + MakeDepend = fun + (F, Fd, Mod, StartLocation) -> + {ok, Filename} = file:pid2name(Fd), + case 
io:parse_erl_form(Fd, undefined, StartLocation) of + {ok, AbsData, EndLocation} -> + case AbsData of + {attribute, _, Key, Value} -> + Attr(F, Mod, Key, Value), + F(F, Fd, Mod, EndLocation); + _ -> F(F, Fd, Mod, EndLocation) + end; + {eof, _ } -> file:close(Fd); + {error, ErrorDescription } -> + file:close(Fd); + {error, ErrorInfo, ErrorLocation} -> + F(F, Fd, Mod, ErrorLocation) + end, + ok end, [begin Mod = list_to_atom(filename:basename(F, ".erl")), - {ok, Fd} = file:open(F, [read]), - MakeDepend(MakeDepend, Fd, Mod) + case file:open(F, [read]) of + {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0); + {error, enoent} -> ok + end end || F <- ErlFiles], Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))), CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)], @@ -5135,6 +5305,7 @@ define makedep.erl end end, ok = file:write_file("$(1)", [ + "# Generated by Erlang.mk. Edit at your own risk!\n\n", [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend], "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n" ]), @@ -5146,10 +5317,10 @@ $(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) endif +ifeq ($(IS_APP)$(IS_DEP),) ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) # Rebuild everything when the Makefile changes. 
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) - $(verbose) mkdir -p $(ERLANG_MK_TMP) +$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP) $(verbose) if test -f $@; then \ touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ touch -c $(PROJECT).d; \ @@ -5159,6 +5330,10 @@ $(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change endif +endif + +$(PROJECT).d:: + $(verbose) : include $(wildcard $(PROJECT).d) @@ -5172,6 +5347,13 @@ define compile_erl -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) endef +define validate_app_file + case file:consult("ebin/$(PROJECT).app") of + {ok, _} -> halt(); + _ -> halt(1) + end +endef + ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) @@ -5181,9 +5363,13 @@ ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.s ifeq ($(wildcard src/$(PROJECT).app.src),) $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \ > ebin/$(PROJECT).app + $(verbose) if ! $(call erlang,$(call validate_app_file)); then \ + echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \ + exit 1; \ + fi else $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ - echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." 
>&2; \ exit 1; \ fi $(appsrc_verbose) cat src/$(PROJECT).app.src \ @@ -5221,7 +5407,7 @@ ifneq ($(SKIP_DEPS),) doc-deps: else doc-deps: $(ALL_DOC_DEPS_DIRS) - $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done + $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done endif # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> @@ -5266,29 +5452,50 @@ ifneq ($(SKIP_DEPS),) test-deps: else test-deps: $(ALL_TEST_DEPS_DIRS) - $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done + $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \ + if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \ + :; \ + else \ + $(MAKE) -C $$dep IS_DEP=1; \ + if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \ + fi \ + done endif ifneq ($(wildcard $(TEST_DIR)),) test-dir: - $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ - $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \ + -pa ebin/ -I include/ $(call core_find,$(TEST_DIR)/,*.erl) endif -ifeq ($(wildcard src),) -test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) -test-build:: clean deps test-deps - $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" -else -ifeq ($(wildcard ebin/test),) +test-build:: IS_TEST=1 test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) -test-build:: clean deps test-deps $(PROJECT).d - $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps) +# We already compiled everything when IS_APP=1. 
+ifndef IS_APP +ifneq ($(wildcard $(TEST_DIR)),) + $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" +endif +ifneq ($(wildcard src),) + $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" + $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" $(gen_verbose) touch ebin/test -else -test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) -test-build:: deps test-deps $(PROJECT).d - $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif +endif + +# Roughly the same as test-build, but when IS_APP=1. +# We only care about compiling the current application. +ifdef IS_APP +test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build-app:: deps test-deps +ifneq ($(wildcard $(TEST_DIR)),) + $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" +endif +ifneq ($(wildcard src),) + $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" + $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" + $(gen_verbose) touch ebin/test +endif endif clean:: clean-test-dir @@ -5297,7 +5504,6 @@ clean-test-dir: ifneq ($(wildcard $(TEST_DIR)/*.beam),) $(gen_verbose) rm -f $(TEST_DIR)/*.beam endif -endif # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -5329,11 +5535,8 @@ $(call comma_list,$(foreach d,$(DEPS),\ {erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}. 
endef -$(eval _compat_rebar_config = $$(compat_rebar_config)) -$(eval export _compat_rebar_config) - rebar.config: - $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config) # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -5403,8 +5606,8 @@ endef asciidoc-manual:: doc-deps asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES) - $(call erlang,$(call asciidoc2man.erl,$?)) - $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;) + $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?)) + $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;) install-docs:: install-asciidoc @@ -5471,31 +5674,30 @@ endef # To prevent autocompletion issues with ZSH, we add "include erlang.mk" # separately during the actual bootstrap. -ifdef SP define bs_Makefile PROJECT = $p PROJECT_DESCRIPTION = New project PROJECT_VERSION = 0.1.0 - +$(if $(SP), # Whitespace to be used when creating files from templates. SP = $(SP) - +) endef -else -define bs_Makefile -PROJECT = $p -PROJECT_DESCRIPTION = New project -PROJECT_VERSION = 0.1.0 - -endef -endif define bs_apps_Makefile PROJECT = $p PROJECT_DESCRIPTION = New project PROJECT_VERSION = 0.1.0 +$(if $(SP), +# Whitespace to be used when creating files from templates. +SP = $(SP) +) +# Make sure we know where the applications are located. +ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app) +APPS_DIR ?= .. +DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app) -include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +include $$(ROOT_DIR)/erlang.mk endef define bs_app @@ -5515,8 +5717,8 @@ endef define bs_relx_config {release, {$p_release, "1"}, [$p, sasl, runtime_tools]}. {extended_start_script, true}. -{sys_config, "rel/sys.config"}. 
-{vm_args, "rel/vm.args"}. +{sys_config, "config/sys.config"}. +{vm_args, "config/vm.args"}. endef define bs_sys_config @@ -5672,6 +5874,51 @@ code_change(_OldVsn, StateName, StateData, _Extra) -> {ok, StateName, StateData}. endef +define tpl_gen_statem +-module($(n)). +-behaviour(gen_statem). + +%% API. +-export([start_link/0]). + +%% gen_statem. +-export([callback_mode/0]). +-export([init/1]). +-export([state_name/3]). +-export([handle_event/4]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_statem:start_link(?MODULE, [], []). + +%% gen_statem. + +callback_mode() -> + state_functions. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_EventType, _EventData, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_EventType, _EventData, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + define tpl_cowboy_loop -module($(n)). -behaviour(cowboy_loop_handler). @@ -5774,10 +6021,6 @@ endef # Plugin-specific targets. 
-define render_template - $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) -endef - ifndef WS ifdef SP WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) @@ -5791,40 +6034,44 @@ ifneq ($(wildcard src/),) $(error Error: src/ directory already exists) endif $(eval p := $(PROJECT)) + $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ + $(error Error: Invalid characters in the application name)) $(eval n := $(PROJECT)_sup) - $(call render_template,bs_Makefile,Makefile) + $(verbose) $(call core_render,bs_Makefile,Makefile) $(verbose) echo "include erlang.mk" >> Makefile $(verbose) mkdir src/ ifdef LEGACY - $(call render_template,bs_appsrc,src/$(PROJECT).app.src) + $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src) endif - $(call render_template,bs_app,src/$(PROJECT)_app.erl) - $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl) + $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl) bootstrap-lib: ifneq ($(wildcard src/),) $(error Error: src/ directory already exists) endif $(eval p := $(PROJECT)) - $(call render_template,bs_Makefile,Makefile) + $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ + $(error Error: Invalid characters in the application name)) + $(verbose) $(call core_render,bs_Makefile,Makefile) $(verbose) echo "include erlang.mk" >> Makefile $(verbose) mkdir src/ ifdef LEGACY - $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) + $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src) endif bootstrap-rel: ifneq ($(wildcard relx.config),) $(error Error: relx.config already exists) endif -ifneq ($(wildcard rel/),) - $(error Error: rel/ directory already exists) +ifneq ($(wildcard config/),) + $(error Error: config/ directory already exists) endif $(eval p := $(PROJECT)) - $(call render_template,bs_relx_config,relx.config) - 
$(verbose) mkdir rel/ - $(call render_template,bs_sys_config,rel/sys.config) - $(call render_template,bs_vm_args,rel/vm.args) + $(verbose) $(call core_render,bs_relx_config,relx.config) + $(verbose) mkdir config/ + $(verbose) $(call core_render,bs_sys_config,config/sys.config) + $(verbose) $(call core_render,bs_vm_args,config/vm.args) new-app: ifndef in @@ -5834,14 +6081,16 @@ ifneq ($(wildcard $(APPS_DIR)/$in),) $(error Error: Application $in already exists) endif $(eval p := $(in)) + $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ + $(error Error: Invalid characters in the application name)) $(eval n := $(in)_sup) $(verbose) mkdir -p $(APPS_DIR)/$p/src/ - $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) + $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) ifdef LEGACY - $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) + $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) endif - $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) - $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) new-lib: ifndef in @@ -5851,10 +6100,12 @@ ifneq ($(wildcard $(APPS_DIR)/$in),) $(error Error: Application $in already exists) endif $(eval p := $(in)) + $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ + $(error Error: Invalid characters in the application name)) $(verbose) mkdir -p $(APPS_DIR)/$p/src/ - $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) + $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) ifdef LEGACY - $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) + $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) endif new: @@ -5868,13 +6119,14 @@ ifndef n $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) endif ifdef in - $(verbose) 
$(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= + $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl) else - $(call render_template,tpl_$(t),src/$(n).erl) + $(verbose) $(call core_render,tpl_$(t),src/$(n).erl) endif list-templates: - $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + $(verbose) @echo Available templates: + $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) # Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -5913,8 +6165,8 @@ ifeq ($(PLATFORM),msys2) CXXFLAGS ?= -O3 -finline-functions -Wall else ifeq ($(PLATFORM),darwin) CC ?= cc - CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes - CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -Wall LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress else ifeq ($(PLATFORM),freebsd) CC ?= cc @@ -5981,6 +6233,8 @@ $(C_SRC_OUTPUT_FILE): $(OBJECTS) $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ -o $(C_SRC_OUTPUT_FILE) +$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV) + %.o: %.c $(COMPILE_C) $(OUTPUT_OPTION) $< @@ -6001,12 +6255,16 @@ clean-c_src: endif ifneq ($(wildcard $(C_SRC_DIR)),) +ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().') + $(C_SRC_ENV): $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \ io_lib:format( \ + \"# Generated by Erlang.mk. 
Edit at your own risk!~n~n\" \ \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ - \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \ + \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \ [code:root_dir(), erlang:system_info(version), \ code:lib_dir(erl_interface, include), \ code:lib_dir(erl_interface, lib)])), \ @@ -6018,6 +6276,10 @@ distclean-c_src-env: $(gen_verbose) rm -f $(C_SRC_ENV) -include $(C_SRC_ENV) + +ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR)) +$(shell rm -f $(C_SRC_ENV)) +endif endif # Templates. @@ -6103,12 +6365,15 @@ endif ifneq ($(wildcard src/$n.erl),) $(error Error: src/$n.erl already exists) endif +ifndef n + $(error Usage: $(MAKE) new-nif n=NAME [in=APP]) +endif ifdef in $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= else $(verbose) mkdir -p $(C_SRC_DIR) src/ - $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) - $(call render_template,bs_erl_nif,src/$n.erl) + $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(verbose) $(call core_render,bs_erl_nif,src/$n.erl) endif # Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu> @@ -6137,8 +6402,10 @@ ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erll ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE))) ci-setup:: + $(verbose) : ci-extra:: + $(verbose) : ci_verbose_0 = @echo " CI " $(1); ci_verbose = $(ci_verbose_$(V)) @@ -6159,8 +6426,8 @@ $(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp))) $(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native))) $(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm))) -$(foreach otp,$(CI_OTP),$(eval $(call kerl_otp_target,$(otp)))) -$(foreach otp,$(sort $(CI_HIPE) $(CI_ERLLLVM)),$(eval $(call kerl_hipe_target,$(otp)))) +$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp)))) +$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort 
$(CI_HIPE) $(CI_ERLLLVM))),$(eval $(call kerl_hipe_target,$(otp)))) help:: $(verbose) printf "%s\n" "" \ @@ -6193,7 +6460,9 @@ CT_LOGS_DIR ?= $(CURDIR)/logs tests:: ct +ifndef KEEP_LOGS distclean:: distclean-ct +endif help:: $(verbose) printf "%s\n" "" \ @@ -6208,16 +6477,16 @@ help:: CT_RUN = ct_run \ -no_auto_compile \ -noinput \ - -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \ + -pa $(CURDIR)/ebin $(TEST_DIR) \ -dir $(TEST_DIR) \ -logdir $(CT_LOGS_DIR) ifeq ($(CT_SUITES),) -ct: $(if $(IS_APP),,apps-ct) +ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct) else # We do not run tests if we are in an apps/* with no test directory. ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1) -ct: test-build $(if $(IS_APP),,apps-ct) +ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct) $(verbose) mkdir -p $(CT_LOGS_DIR) $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) endif @@ -6225,30 +6494,34 @@ endif ifneq ($(ALL_APPS_DIRS),) define ct_app_target -apps-ct-$1: - $(MAKE) -C $1 ct IS_APP=1 +apps-ct-$1: test-build + $$(MAKE) -C $1 ct IS_APP=1 endef $(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app)))) -apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS)) +apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS)) endif -ifndef t -CT_EXTRA = -else +ifdef t ifeq (,$(findstring :,$t)) CT_EXTRA = -group $t else t_words = $(subst :, ,$t) CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words)) endif +else +ifdef c +CT_EXTRA = -case $c +else +CT_EXTRA = +endif endif define ct_suite_target ct-$(1): test-build $(verbose) mkdir -p $(CT_LOGS_DIR) - $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS) + $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS) endef $(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) @@ -6269,6 +6542,7 @@ export DIALYZER_PLT PLT_APPS ?= DIALYZER_DIRS ?= --src -r $(wildcard 
src) $(ALL_APPS_DIRS) DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs +DIALYZER_PLT_OPTS ?= # Core targets. @@ -6299,11 +6573,17 @@ define filter_opts.erl halt(). endef +# DIALYZER_PLT is a variable understood directly by Dialyzer. +# +# We append the path to erts at the end of the PLT. This works +# because the PLT file is in the external term format and the +# function binary_to_term/1 ignores any trailing data. $(DIALYZER_PLT): deps app $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \ while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log)) - $(verbose) dialyzer --build_plt --apps erts kernel stdlib \ - $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) + $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \ + erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2 + $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@ plt: $(DIALYZER_PLT) @@ -6311,11 +6591,18 @@ distclean-plt: $(gen_verbose) rm -f $(DIALYZER_PLT) ifneq ($(wildcard $(DIALYZER_PLT)),) -dialyze: +dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app) + $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \ + grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \ + rm $(DIALYZER_PLT); \ + $(MAKE) plt; \ + fi else dialyze: $(DIALYZER_PLT) endif - $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) + $(verbose) dialyzer --no_native `$(ERL) \ + -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \ + -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/) # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> # This file is part of erlang.mk and subject to the terms of the ISC License. 
@@ -6331,7 +6618,7 @@ EDOC_OUTPUT ?= doc define edoc.erl SrcPaths = lists:foldl(fun(P, Acc) -> filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc - end, [], [$(call comma_list,$(patsubst %,'%',$(EDOC_SRC_DIRS)))]), + end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]), DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}], edoc:application($(1), ".", [$(2)] ++ DefaultOpts), halt(0). @@ -6360,6 +6647,7 @@ distclean-edoc: DTL_FULL_PATH ?= DTL_PATH ?= templates/ +DTL_PREFIX ?= DTL_SUFFIX ?= _dtl DTL_OPTS ?= @@ -6375,18 +6663,17 @@ DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl)) ifneq ($(DTL_FILES),) -DTL_NAMES = $(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)) +DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%))) DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES))) BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES))) ifneq ($(words $(DTL_FILES)),0) # Rebuild templates when the Makefile changes. 
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) - @mkdir -p $(ERLANG_MK_TMP) - @if test -f $@; then \ +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP) + $(verbose) if test -f $@; then \ touch $(DTL_FILES); \ fi - @touch $@ + $(verbose) touch $@ ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl endif @@ -6397,10 +6684,10 @@ define erlydtl_compile.erl "" -> filename:basename(F, ".dtl"); _ -> - "$(DTL_PATH)/" ++ F2 = filename:rootname(F, ".dtl"), + "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"), re:replace(F2, "/", "_", [{return, list}, global]) end, - Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of ok -> ok; {ok, _} -> ok @@ -6412,7 +6699,7 @@ endef ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/ $(if $(strip $?),\ $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\ - -pa ebin/ $(DEPS_DIR)/erlydtl/ebin/)) + -pa ebin/)) endif @@ -6445,13 +6732,15 @@ help:: # Plugin-specific targets. +escript-zip:: FULL=1 escript-zip:: deps app $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP)) $(verbose) rm -f $(ESCRIPT_ZIP_FILE) $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/* ifneq ($(DEPS),) $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \ - `cat $(ERLANG_MK_TMP)/deps.log | sed 's/^$(subst /,\/,$(DEPS_DIR))\///' | sed 's/$$/\/ebin\/\*/'` + $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \ + $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log))))) endif escript:: escript-zip @@ -6488,27 +6777,17 @@ help:: # Plugin-specific targets. 
define eunit.erl - case "$(COVER)" of - "" -> ok; - _ -> - case cover:compile_beam_directory("ebin") of - {error, _} -> halt(1); - _ -> ok - end - end, + $(call cover.erl) + CoverSetup(), case eunit:test($1, [$(EUNIT_OPTS)]) of ok -> ok; error -> halt(2) end, - case "$(COVER)" of - "" -> ok; - _ -> - cover:export("$(COVER_DATA_DIR)/eunit.coverdata") - end, + CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"), halt() endef -EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin +EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin ifdef t ifeq (,$(findstring :,$(t))) @@ -6525,11 +6804,13 @@ EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl))) EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \ $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)') -eunit: test-build $(if $(IS_APP),,apps-eunit) cover-data-dir +eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir +ifneq ($(wildcard src/ $(TEST_DIR)),) $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS)) +endif ifneq ($(ALL_APPS_DIRS),) -apps-eunit: +apps-eunit: test-build $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \ [ $$? 
-ne 0 ] && eunit_retcode=1 ; done ; \ exit $$eunit_retcode @@ -6547,7 +6828,11 @@ ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper) tests:: proper define proper_check.erl - code:add_pathsa(["$(call core_native_path,$(CURDIR)/ebin)", "$(call core_native_path,$(DEPS_DIR)/*/ebin)"]), + $(call cover.erl) + code:add_pathsa([ + "$(call core_native_path,$(CURDIR)/ebin)", + "$(call core_native_path,$(DEPS_DIR)/*/ebin)", + "$(call core_native_path,$(TEST_DIR))"]), Module = fun(M) -> [true] =:= lists:usort([ case atom_to_list(F) of @@ -6559,13 +6844,16 @@ define proper_check.erl end || {F, 0} <- M:module_info(exports)]) end, - try - case $(1) of + try begin + CoverSetup(), + Res = case $(1) of all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]); module -> Module($(2)); function -> proper:quickcheck($(2), nocolors) - end - of + end, + CoverExport("$(COVER_DATA_DIR)/proper.coverdata"), + Res + end of true -> halt(0); _ -> halt(1) catch error:undef -> @@ -6576,20 +6864,79 @@ endef ifdef t ifeq (,$(findstring :,$(t))) -proper: test-build +proper: test-build cover-data-dir $(verbose) $(call erlang,$(call proper_check.erl,module,$(t))) else -proper: test-build +proper: test-build cover-data-dir $(verbose) echo Testing $(t)/0 $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)())) endif else -proper: test-build - $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) +proper: test-build cover-data-dir + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam)))))) $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES))) endif endif +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +ifneq ($(wildcard src/),) +PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES)) +ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES)))) + +ifeq ($(PROTO_FILES),) +$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: + $(verbose) : +else +# Rebuild proto files when the Makefile changes. +# We exclude $(PROJECT).d to avoid a circular dependency. +$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP) + $(verbose) if test -f $@; then \ + touch $(PROTO_FILES); \ + fi + $(verbose) touch $@ + +$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs +endif + +ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),) +define compile_proto.erl + [begin + protobuffs_compile:generate_source(F, [ + {output_include_dir, "./include"}, + {output_src_dir, "./src"}]) + end || F <- string:tokens("$1", " ")], + halt(). +endef +else +define compile_proto.erl + [begin + gpb_compile:file(F, [ + {include_as_lib, true}, + {module_name_suffix, "_pb"}, + {o_hrl, "./include"}, + {o_erl, "./src"}]) + end || F <- string:tokens("$1", " ")], + halt(). +endef +endif + +ifneq ($(PROTO_FILES),) +$(PROJECT).d:: $(PROTO_FILES) + $(verbose) mkdir -p ebin/ include/ + $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?))) +endif +endif + # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -6600,7 +6947,7 @@ endif RELX ?= $(ERLANG_MK_TMP)/relx RELX_CONFIG ?= $(CURDIR)/relx.config -RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.23.0/relx +RELX_URL ?= https://erlang.mk/res/relx-v3.27.0 RELX_OPTS ?= RELX_OUTPUT_DIR ?= _rel RELX_REL_EXT ?= @@ -6630,20 +6977,29 @@ distclean:: distclean-relx-rel # Plugin-specific targets. 
-$(RELX): - $(verbose) mkdir -p $(ERLANG_MK_TMP) +$(RELX): | $(ERLANG_MK_TMP) $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) $(verbose) chmod +x $(RELX) relx-rel: $(RELX) rel-deps app - $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release $(if $(filter 1,$(RELX_TAR)),tar) + $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release + $(verbose) $(MAKE) relx-post-rel +ifeq ($(RELX_TAR),1) + $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar +endif relx-relup: $(RELX) rel-deps app - $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release relup $(if $(filter 1,$(RELX_TAR)),tar) + $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release + $(MAKE) relx-post-rel + $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar) distclean-relx-rel: $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) +# Default hooks. +relx-post-rel:: + $(verbose) : + # Run target. ifeq ($(wildcard $(RELX_CONFIG)),) @@ -6659,20 +7015,32 @@ define get_relx_release.erl {semver, _} -> ""; VsnStr -> Vsn0 end, - io:format("~s ~s", [Name, Vsn]), + Extended = case lists:keyfind(extended_start_script, 1, Config) of + {_, true} -> "1"; + _ -> "" + end, + io:format("~s ~s ~s", [Name, Vsn, Extended]), halt(0). 
endef RELX_REL := $(shell $(call erlang,$(get_relx_release.erl))) RELX_REL_NAME := $(word 1,$(RELX_REL)) RELX_REL_VSN := $(word 2,$(RELX_REL)) +RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console) ifeq ($(PLATFORM),msys2) RELX_REL_EXT := .cmd endif run:: all - $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) console + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD) + +ifdef RELOAD +rel:: + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \ + eval "io:format(\"~p~n\", [c:lm()])" +endif help:: $(verbose) printf "%s\n" "" \ @@ -6690,7 +7058,7 @@ endif # Configuration. SHELL_ERL ?= erl -SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR) SHELL_OPTS ?= ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) @@ -6706,10 +7074,21 @@ help:: $(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) +ifneq ($(SKIP_DEPS),) +build-shell-deps: +else build-shell-deps: $(ALL_SHELL_DEPS_DIRS) - $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \ + if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \ + :; \ + else \ + $(MAKE) -C $$dep IS_DEP=1; \ + if [ ! 
-L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \ + fi \ + done +endif -shell: build-shell-deps +shell:: build-shell-deps $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) # Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net> @@ -6800,17 +7179,21 @@ ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) tests:: triq define triq_check.erl + $(call cover.erl) code:add_pathsa([ "$(call core_native_path,$(CURDIR)/ebin)", "$(call core_native_path,$(DEPS_DIR)/*/ebin)", "$(call core_native_path,$(TEST_DIR))"]), - try - case $(1) of + try begin + CoverSetup(), + Res = case $(1) of all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); module -> triq:check($(2)); function -> triq:check($(2)) - end - of + end, + CoverExport("$(COVER_DATA_DIR)/triq.coverdata"), + Res + end of true -> halt(0); _ -> halt(1) catch error:undef -> @@ -6821,15 +7204,15 @@ endef ifdef t ifeq (,$(findstring :,$(t))) -triq: test-build +triq: test-build cover-data-dir $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) else -triq: test-build +triq: test-build cover-data-dir $(verbose) echo Testing $(t)/0 $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) endif else -triq: test-build +triq: test-build cover-data-dir $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam)))))) $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) @@ -6881,9 +7264,14 @@ distclean-xref: # This file is part of erlang.mk and subject to the terms of the ISC License. COVER_REPORT_DIR ?= cover -COVER_DATA_DIR ?= $(CURDIR) +COVER_DATA_DIR ?= $(COVER_REPORT_DIR) -# Hook in coverage to ct +ifdef COVER +COVER_APPS ?= $(notdir $(ALL_APPS_DIRS)) +COVER_DEPS ?= +endif + +# Code coverage for Common Test. 
ifdef COVER ifdef CT_RUN @@ -6893,13 +7281,44 @@ test-build:: $(TEST_DIR)/ct.cover.spec $(TEST_DIR)/ct.cover.spec: cover-data-dir $(gen_verbose) printf "%s\n" \ "{incl_app, '$(PROJECT)', details}." \ - '{export,"$(abspath $(COVER_DATA_DIR))/ct.coverdata"}.' > $@ + "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \ + $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \ + $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \ + '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@ CT_RUN += -cover $(TEST_DIR)/ct.cover.spec endif endif endif +# Code coverage for other tools. + +ifdef COVER +define cover.erl + CoverSetup = fun() -> + Dirs = ["$(call core_native_path,$(CURDIR)/ebin)" + $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)") + $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")], + [begin + case filelib:is_dir(Dir) of + false -> false; + true -> + case cover:compile_beam_directory(Dir) of + {error, _} -> halt(1); + _ -> true + end + end + end || Dir <- Dirs] + end, + CoverExport = fun(Filename) -> cover:export(Filename) end, +endef +else +define cover.erl + CoverSetup = fun() -> ok end, + CoverExport = fun(_) -> ok end, +endef +endif + # Core targets ifdef COVER @@ -6959,7 +7378,9 @@ ifneq ($(COVER_REPORT_DIR),) cover-report-clean: $(gen_verbose) rm -rf $(COVER_REPORT_DIR) +ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR)) $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR)) +endif ifeq ($(COVERDATA),) cover-report: @@ -7049,7 +7470,7 @@ __ARCHIVE_BELOW__ endef sfx: - $(call render_template,sfx_stub,$(SFX_OUTPUT_FILE)) + $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE)) $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE) $(verbose) chmod +x $(SFX_OUTPUT_FILE) @@ -7068,6 +7489,11 @@ $(foreach p,$(DEP_PLUGINS),\ $(call 
core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ $(call core_dep_plugin,$p/plugins.mk,$p)))) +help:: help-plugins + +help-plugins:: + $(verbose) : + # Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu> # Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> # This file is part of erlang.mk and subject to the terms of the ISC License. @@ -7102,11 +7528,11 @@ else # # $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). -$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS) -$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) -$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) -$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) -$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) # Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of # dependencies with a single target. 
@@ -7129,9 +7555,8 @@ $(ERLANG_MK_RECURSIVE_DEPS_LIST) \ $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ -$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP) ifeq ($(IS_APP)$(IS_DEP),) - $(verbose) mkdir -p $(ERLANG_MK_TMP) $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST) endif ifndef IS_APP diff --git a/include/amqqueue.hrl b/include/amqqueue.hrl index 5f873082e5..3a9ac45cce 100644 --- a/include/amqqueue.hrl +++ b/include/amqqueue.hrl @@ -65,15 +65,15 @@ -define(amqqueue_has_valid_pid(Q), ((?is_amqqueue_v2(Q) andalso - is_pid(?amqqueue_v2_field_pid(Q))) orelse - (?is_amqqueue_v1(Q) andalso - is_pid(?amqqueue_v1_field_pid(Q))))). + is_pid(?amqqueue_v2_field_pid(Q))) orelse + (?is_amqqueue_v1(Q) andalso + is_pid(?amqqueue_v1_field_pid(Q))))). -define(amqqueue_pid_runs_on_local_node(Q), ((?is_amqqueue_v2(Q) andalso - node(?amqqueue_v2_field_pid(Q)) =:= node()) orelse - (?is_amqqueue_v1(Q) andalso - node(?amqqueue_v1_field_pid(Q)) =:= node()))). + node(?amqqueue_v2_field_pid(Q)) =:= node()) orelse + (?is_amqqueue_v1(Q) andalso + node(?amqqueue_v1_field_pid(Q)) =:= node()))). -define(amqqueue_pid_equals(Q, Pid), ((?is_amqqueue_v2(Q) andalso @@ -108,9 +108,9 @@ -define(amqqueue_vhost_equals(Q, VHost), ((?is_amqqueue_v2(Q) andalso - ?amqqueue_v2_vhost(Q) =:= VHost) orelse - (?is_amqqueue_v1(Q) andalso - ?amqqueue_v1_vhost(Q) =:= VHost))). + ?amqqueue_v2_vhost(Q) =:= VHost) orelse + (?is_amqqueue_v1(Q) andalso + ?amqqueue_v1_vhost(Q) =:= VHost))). -ifdef(DEBUG_QUORUM_QUEUE_FF). -define(enable_quorum_queue_if_debug, diff --git a/priv/schema/rabbit.schema b/priv/schema/rabbit.schema index c7171ce74a..44c199ff4d 100644 --- a/priv/schema/rabbit.schema +++ b/priv/schema/rabbit.schema @@ -391,6 +391,13 @@ end}. {datatype, integer} ]}. +%% Default worker process pool size. Used to limit maximum concurrency rate +%% of certain operations, e.g. 
queue initialisation and recovery on node boot. + +{mapping, "default_worker_pool_size", "rabbit.default_worker_pool_size", [ + {datatype, integer}, {validators, ["non_negative_integer"]} +]}. + %% Password hashing implementation. Will only affect newly %% created users. To recalculate hash for an existing user %% it's necessary to update her password. diff --git a/rabbitmq-components.mk b/rabbitmq-components.mk index a1817e3196..61e5c8112e 100644 --- a/rabbitmq-components.mk +++ b/rabbitmq-components.mk @@ -184,6 +184,15 @@ RABBITMQ_COMPONENTS = amqp_client \ rabbitmq_web_stomp_examples \ rabbitmq_website +# Erlang.mk does not rebuild dependencies by default, once they were +# compiled once, except for those listed in the `$(FORCE_REBUILD)` +# variable. +# +# We want all RabbitMQ components to always be rebuilt: this eases +# the work on several components at the same time. + +FORCE_REBUILD = $(RABBITMQ_COMPONENTS) + # Several components have a custom erlang.mk/build.config, mainly # to disable eunit. Therefore, we can't use the top-level project's # erlang.mk copy. diff --git a/scripts/rabbitmq-server b/scripts/rabbitmq-server index f9d7661edf..4a2737a5e3 100755 --- a/scripts/rabbitmq-server +++ b/scripts/rabbitmq-server @@ -194,6 +194,7 @@ RABBITMQ_DIST_PORT=$RABBITMQ_DIST_PORT \ -noinput \ -hidden \ -s rabbit_prelaunch \ + ${RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS} \ ${RABBITMQ_NAME_TYPE} ${RABBITMQ_PRELAUNCH_NODENAME} \ -conf_advanced "${RABBITMQ_ADVANCED_CONFIG_FILE}" \ -rabbit feature_flags_file "\"$RABBITMQ_FEATURE_FLAGS_FILE\"" \ diff --git a/src/amqqueue.erl b/src/amqqueue.erl index dda2642c34..35e7f0c4c4 100644 --- a/src/amqqueue.erl +++ b/src/amqqueue.erl @@ -19,7 +19,9 @@ -include_lib("rabbit_common/include/rabbit.hrl"). -include("amqqueue.hrl"). 
--export([new/9, +-export([new/8, + new/9, + new_with_version/9, new_with_version/10, fields/0, fields/1, @@ -184,6 +186,40 @@ pid() | none, rabbit_framing:amqp_table(), rabbit_types:vhost() | undefined, + map()) -> amqqueue(). + +new(#resource{kind = queue} = Name, + Pid, + Durable, + AutoDelete, + Owner, + Args, + VHost, + Options) + when (is_pid(Pid) orelse is_tuple(Pid) orelse Pid =:= none) andalso + is_boolean(Durable) andalso + is_boolean(AutoDelete) andalso + (is_pid(Owner) orelse Owner =:= none) andalso + is_list(Args) andalso + (is_binary(VHost) orelse VHost =:= undefined) andalso + is_map(Options) -> + new(Name, + Pid, + Durable, + AutoDelete, + Owner, + Args, + VHost, + Options, + ?amqqueue_v1_type). + +-spec new(rabbit_amqqueue:name(), + pid() | ra_server_id() | none, + boolean(), + boolean(), + pid() | none, + rabbit_framing:amqp_table(), + rabbit_types:vhost() | undefined, map(), atom()) -> amqqueue(). @@ -238,6 +274,44 @@ new(#resource{kind = queue} = Name, pid() | none, rabbit_framing:amqp_table(), rabbit_types:vhost() | undefined, + map()) -> amqqueue(). + +new_with_version(RecordVersion, + #resource{kind = queue} = Name, + Pid, + Durable, + AutoDelete, + Owner, + Args, + VHost, + Options) + when (is_pid(Pid) orelse is_tuple(Pid) orelse Pid =:= none) andalso + is_boolean(Durable) andalso + is_boolean(AutoDelete) andalso + (is_pid(Owner) orelse Owner =:= none) andalso + is_list(Args) andalso + (is_binary(VHost) orelse VHost =:= undefined) andalso + is_map(Options) -> + new_with_version(RecordVersion, + Name, + Pid, + Durable, + AutoDelete, + Owner, + Args, + VHost, + Options, + ?amqqueue_v1_type). + +-spec new_with_version +(amqqueue_v1 | amqqueue_v2, + rabbit_amqqueue:name(), + pid() | ra_server_id() | none, + boolean(), + boolean(), + pid() | none, + rabbit_framing:amqp_table(), + rabbit_types:vhost() | undefined, map(), atom()) -> amqqueue(). 
@@ -359,6 +433,8 @@ get_exclusive_owner(#amqqueue{exclusive_owner = Owner}) -> get_exclusive_owner(Queue) -> amqqueue_v1:get_exclusive_owner(Queue). +% gm_pids + -spec get_gm_pids(amqqueue()) -> [{pid(), pid()} | pid()] | none. get_gm_pids(#amqqueue{gm_pids = GMPids}) -> @@ -421,7 +497,7 @@ get_pid(#amqqueue{pid = Pid}) -> Pid; get_pid(Queue) -> amqqueue_v1:get_pid(Queue). -spec set_pid -(amqqueue_v2(), pid() | ra_server_id() | none) -> amqqueue_v2(); +(amqqueue_v2(), pid() | ra_server_id() | none) -> amqqueue_v2(); (amqqueue_v1:amqqueue_v1(), pid() | none) -> amqqueue_v1:amqqueue_v1(). set_pid(#amqqueue{} = Queue, Pid) -> @@ -509,7 +585,8 @@ set_slave_pids(Queue, SlavePids) -> -spec get_slave_pids_pending_shutdown(amqqueue()) -> [pid()]. -get_slave_pids_pending_shutdown(#amqqueue{slave_pids_pending_shutdown = Slaves}) -> +get_slave_pids_pending_shutdown( + #amqqueue{slave_pids_pending_shutdown = Slaves}) -> Slaves; get_slave_pids_pending_shutdown(Queue) -> amqqueue_v1:get_slave_pids_pending_shutdown(Queue). diff --git a/src/amqqueue_v1.erl b/src/amqqueue_v1.erl index 0a0073075f..9b739026c6 100644 --- a/src/amqqueue_v1.erl +++ b/src/amqqueue_v1.erl @@ -17,9 +17,12 @@ -module(amqqueue_v1). -include_lib("rabbit_common/include/resource.hrl"). +-include("amqqueue.hrl"). 
-export([new/8, + new/9, new_with_version/9, + new_with_version/10, fields/0, fields/1, field_vhost/0, @@ -37,7 +40,8 @@ % gm_pids get_gm_pids/1, set_gm_pids/2, - % name + get_leader/1, + % name (#resource) get_name/1, set_name/2, % operator_policy @@ -53,6 +57,9 @@ % policy_version get_policy_version/1, set_policy_version/2, + % quorum_nodes + get_quorum_nodes/1, + set_quorum_nodes/2, % recoverable_slaves get_recoverable_slaves/1, set_recoverable_slaves/2, @@ -68,14 +75,19 @@ % sync_slave_pids get_sync_slave_pids/1, set_sync_slave_pids/2, + get_type/1, get_vhost/1, is_amqqueue/1, is_auto_delete/1, is_durable/1, + is_classic/1, + is_quorum/1, pattern_match_all/0, pattern_match_on_name/1, + pattern_match_on_type/1, reset_mirroring_and_decorators/1, set_immutable/1, + qnode/1, macros/0]). -define(record_version, ?MODULE). @@ -192,6 +204,42 @@ new(#resource{kind = queue} = Name, VHost, Options). +-spec new(rabbit_amqqueue:name(), + pid() | none, + boolean(), + boolean(), + pid() | none, + rabbit_framing:amqp_table(), + rabbit_types:vhost() | undefined, + map(), + ?amqqueue_v1_type) -> amqqueue(). + +new(#resource{kind = queue} = Name, + Pid, + Durable, + AutoDelete, + Owner, + Args, + VHost, + Options, + ?amqqueue_v1_type) + when (is_pid(Pid) orelse Pid =:= none) andalso + is_boolean(Durable) andalso + is_boolean(AutoDelete) andalso + (is_pid(Owner) orelse Owner =:= none) andalso + is_list(Args) andalso + (is_binary(VHost) orelse VHost =:= undefined) andalso + is_map(Options) -> + new( + Name, + Pid, + Durable, + AutoDelete, + Owner, + Args, + VHost, + Options). + -spec new_with_version(amqqueue_v1, rabbit_amqqueue:name(), pid() | none, @@ -227,6 +275,45 @@ new_with_version(?record_version, vhost = VHost, options = Options}. +-spec new_with_version(amqqueue_v1, + rabbit_amqqueue:name(), + pid() | none, + boolean(), + boolean(), + pid() | none, + rabbit_framing:amqp_table(), + rabbit_types:vhost() | undefined, + map(), + ?amqqueue_v1_type) -> amqqueue(). 
+ +new_with_version(?record_version, + #resource{kind = queue} = Name, + Pid, + Durable, + AutoDelete, + Owner, + Args, + VHost, + Options, + ?amqqueue_v1_type) + when (is_pid(Pid) orelse Pid =:= none) andalso + is_boolean(Durable) andalso + is_boolean(AutoDelete) andalso + (is_pid(Owner) orelse Owner =:= none) andalso + is_list(Args) andalso + (is_binary(VHost) orelse VHost =:= undefined) andalso + is_map(Options) -> + new_with_version( + ?record_version, + Name, + Pid, + Durable, + AutoDelete, + Owner, + Args, + VHost, + Options). + -spec is_amqqueue(any()) -> boolean(). is_amqqueue(#amqqueue{}) -> true; @@ -239,8 +326,7 @@ record_version_to_use() -> -spec upgrade(amqqueue()) -> amqqueue(). -upgrade(#amqqueue{} = Queue) -> - Queue. +upgrade(#amqqueue{} = Queue) -> Queue. -spec upgrade_to(amqqueue_v1, amqqueue()) -> amqqueue(). @@ -260,66 +346,120 @@ set_arguments(#amqqueue{} = Queue, Args) -> % decorators +-spec get_decorators(amqqueue()) -> [atom()] | none | undefined. + get_decorators(#amqqueue{decorators = Decorators}) -> Decorators. +-spec set_decorators(amqqueue(), [atom()] | none | undefined) -> amqqueue(). + set_decorators(#amqqueue{} = Queue, Decorators) -> Queue#amqqueue{decorators = Decorators}. +-spec get_exclusive_owner(amqqueue()) -> pid() | none. + get_exclusive_owner(#amqqueue{exclusive_owner = Owner}) -> Owner. % gm_pids +-spec get_gm_pids(amqqueue()) -> [{pid(), pid()} | pid()] | none. + get_gm_pids(#amqqueue{gm_pids = GMPids}) -> GMPids. +-spec set_gm_pids(amqqueue(), [{pid(), pid()} | pid()] | none) -> amqqueue(). + set_gm_pids(#amqqueue{} = Queue, GMPids) -> Queue#amqqueue{gm_pids = GMPids}. -% name - -get_name(#amqqueue{name = Name}) -> Name. +-spec get_leader(amqqueue_v1()) -> no_return(). -set_name(#amqqueue{} = Queue, Name) -> - Queue#amqqueue{name = Name}. +get_leader(_) -> throw({unsupported, ?record_version, get_leader}). % operator_policy +-spec get_operator_policy(amqqueue()) -> binary() | none | undefined. 
+ get_operator_policy(#amqqueue{operator_policy = OpPolicy}) -> OpPolicy. +-spec set_operator_policy(amqqueue(), binary() | none | undefined) -> + amqqueue(). + set_operator_policy(#amqqueue{} = Queue, OpPolicy) -> Queue#amqqueue{operator_policy = OpPolicy}. +% name + +-spec get_name(amqqueue()) -> rabbit_amqqueue:name(). + +get_name(#amqqueue{name = Name}) -> Name. + +-spec set_name(amqqueue(), rabbit_amqqueue:name()) -> amqqueue(). + +set_name(#amqqueue{} = Queue, Name) -> + Queue#amqqueue{name = Name}. + +-spec get_options(amqqueue()) -> map(). + get_options(#amqqueue{options = Options}) -> Options. % pid +-spec get_pid +(amqqueue_v1:amqqueue_v1()) -> pid() | none. + get_pid(#amqqueue{pid = Pid}) -> Pid. +-spec set_pid +(amqqueue_v1:amqqueue_v1(), pid() | none) -> amqqueue_v1:amqqueue_v1(). + set_pid(#amqqueue{} = Queue, Pid) -> Queue#amqqueue{pid = Pid}. % policy +-spec get_policy(amqqueue()) -> proplists:proplist() | none | undefined. + get_policy(#amqqueue{policy = Policy}) -> Policy. +-spec set_policy(amqqueue(), binary() | none | undefined) -> amqqueue(). + set_policy(#amqqueue{} = Queue, Policy) -> Queue#amqqueue{policy = Policy}. % policy_version +-spec get_policy_version(amqqueue()) -> non_neg_integer(). + get_policy_version(#amqqueue{policy_version = PV}) -> PV. +-spec set_policy_version(amqqueue(), non_neg_integer()) -> amqqueue(). + set_policy_version(#amqqueue{} = Queue, PV) -> Queue#amqqueue{policy_version = PV}. % recoverable_slaves +-spec get_recoverable_slaves(amqqueue()) -> [atom()] | none. + get_recoverable_slaves(#amqqueue{recoverable_slaves = Slaves}) -> Slaves. +-spec set_recoverable_slaves(amqqueue(), [atom()] | none) -> amqqueue(). + set_recoverable_slaves(#amqqueue{} = Queue, Slaves) -> Queue#amqqueue{recoverable_slaves = Slaves}. +% quorum_nodes (new in v2) + +-spec get_quorum_nodes(amqqueue()) -> no_return(). + +get_quorum_nodes(_) -> throw({unsupported, ?record_version, get_quorum_nodes}). 
+ +-spec set_quorum_nodes(amqqueue(), [node()]) -> no_return(). + +set_quorum_nodes(_, _) -> + throw({unsupported, ?record_version, set_quorum_nodes}). + % slave_pids get_slave_pids(#amqqueue{slave_pids = Slaves}) -> @@ -330,7 +470,8 @@ set_slave_pids(#amqqueue{} = Queue, SlavePids) -> % slave_pids_pending_shutdown -get_slave_pids_pending_shutdown(#amqqueue{slave_pids_pending_shutdown = Slaves}) -> +get_slave_pids_pending_shutdown( + #amqqueue{slave_pids_pending_shutdown = Slaves}) -> Slaves. set_slave_pids_pending_shutdown(#amqqueue{} = Queue, SlavePids) -> @@ -338,23 +479,55 @@ set_slave_pids_pending_shutdown(#amqqueue{} = Queue, SlavePids) -> % state +-spec get_state(amqqueue()) -> atom() | none. + get_state(#amqqueue{state = State}) -> State. -set_state(#amqqueue{} = Queue, State) -> Queue#amqqueue{state = State}. +-spec set_state(amqqueue(), atom() | none) -> amqqueue(). + +set_state(#amqqueue{} = Queue, State) -> + Queue#amqqueue{state = State}. % sync_slave_pids -get_sync_slave_pids(#amqqueue{sync_slave_pids = Pids}) -> Pids. +-spec get_sync_slave_pids(amqqueue()) -> [pid()] | none. + +get_sync_slave_pids(#amqqueue{sync_slave_pids = Pids}) -> + Pids. + +-spec set_sync_slave_pids(amqqueue(), [pid()] | none) -> amqqueue(). set_sync_slave_pids(#amqqueue{} = Queue, Pids) -> Queue#amqqueue{sync_slave_pids = Pids}. +%% New in v2. + +-spec get_type(amqqueue()) -> atom(). + +get_type(Queue) when ?is_amqqueue(Queue) -> ?amqqueue_v1_type. + +-spec get_vhost(amqqueue()) -> rabbit_types:vhost() | undefined. + get_vhost(#amqqueue{vhost = VHost}) -> VHost. +-spec is_auto_delete(amqqueue()) -> boolean(). + is_auto_delete(#amqqueue{auto_delete = AutoDelete}) -> AutoDelete. +-spec is_durable(amqqueue()) -> boolean(). + is_durable(#amqqueue{durable = Durable}) -> Durable. +-spec is_classic(amqqueue()) -> boolean(). + +is_classic(Queue) -> + get_type(Queue) =:= ?amqqueue_v1_type. + +-spec is_quorum(amqqueue()) -> boolean(). + +is_quorum(Queue) -> + get_type(Queue) =:= quorum. 
+ fields() -> fields(?record_version). fields(?record_version) -> record_info(fields, amqqueue). @@ -370,6 +543,11 @@ pattern_match_all() -> #amqqueue{_ = '_'}. pattern_match_on_name(Name) -> #amqqueue{name = Name, _ = '_'}. +-spec pattern_match_on_type(atom()) -> no_return(). + +pattern_match_on_type(_) -> + throw({unsupported, ?record_version, pattern_match_on_type}). + reset_mirroring_and_decorators(#amqqueue{} = Queue) -> Queue#amqqueue{slave_pids = [], sync_slave_pids = [], @@ -386,6 +564,14 @@ set_immutable(#amqqueue{} = Queue) -> decorators = none, state = none}. +-spec qnode(amqqueue() | pid()) -> node(). + +qnode(Queue) when ?is_amqqueue(Queue) -> + QPid = get_pid(Queue), + qnode(QPid); +qnode(QPid) when is_pid(QPid) -> + node(QPid). + macros() -> io:format( "-define(is_~s(Q), is_record(Q, amqqueue, ~b)).~n~n", diff --git a/src/rabbit.erl b/src/rabbit.erl index 4a974fd682..fdf2138515 100644 --- a/src/rabbit.erl +++ b/src/rabbit.erl @@ -33,8 +33,6 @@ -export([log_locations/0, config_files/0, decrypt_config/2]). %% for testing and mgmt-agent -export([is_booted/1, is_booted/0, is_booting/1, is_booting/0]). --deprecated([{force_event_refresh, 1, eventually}]). - -ifdef(TEST). -export([start_logger/0]). @@ -539,6 +537,12 @@ start_loaded_apps(Apps, RestartTypes) -> application:set_env(ra, logger_module, rabbit_log_ra_shim), %% use a larger segments size for queues application:set_env(ra, segment_max_entries, 32768), + case application:get_env(ra, wal_max_size_bytes) of + undefined -> + application:set_env(ra, wal_max_size_bytes, 536870912); %% 512 MiB (2 ^ 29) + _ -> + ok + end, ConfigEntryDecoder = case application:get_env(rabbit, config_entry_decoder) of undefined -> []; @@ -1117,17 +1121,16 @@ start_logger() -> log_locations() -> rabbit_lager:log_locations(). -%% This feature was used by the management API up-to and including -%% RabbitMQ 3.7.x. It is unused in 3.8.x and thus deprecated. We keep it -%% to support in-place upgrades to 3.8.x (i.e.
mixed-version clusters). - -spec force_event_refresh(reference()) -> 'ok'. +% Note: https://www.pivotaltracker.com/story/show/166962656 +% This event is necessary for the stats timer to be initialized with +% the correct values once the management agent has started force_event_refresh(Ref) -> - rabbit_direct:force_event_refresh(Ref), - rabbit_networking:force_connection_event_refresh(Ref), - rabbit_channel:force_event_refresh(Ref), - rabbit_amqqueue:force_event_refresh(Ref). + ok = rabbit_direct:force_event_refresh(Ref), + ok = rabbit_networking:force_connection_event_refresh(Ref), + ok = rabbit_channel:force_event_refresh(Ref), + ok = rabbit_amqqueue:force_event_refresh(Ref). %%--------------------------------------------------------------------------- %% misc diff --git a/src/rabbit_access_control.erl b/src/rabbit_access_control.erl index d04f0047de..4c68fe2eab 100644 --- a/src/rabbit_access_control.erl +++ b/src/rabbit_access_control.erl @@ -21,6 +21,8 @@ -export([check_user_pass_login/2, check_user_login/2, check_user_loopback/2, check_vhost_access/4, check_resource_access/4, check_topic_access/4]). +-export([permission_cache_can_expire/1, update_state/2]). + %%---------------------------------------------------------------------------- -export_type([permission_atom/0]). @@ -217,3 +219,38 @@ check_access(Fun, Module, ErrStr, ErrArgs, ErrName) -> rabbit_log:error(FullErrStr, FullErrArgs), rabbit_misc:protocol_error(ErrName, FullErrStr, FullErrArgs) end. + +-spec update_state(User :: rabbit_types:user(), NewState :: term()) -> + {'ok', rabbit_types:auth_user()} | + {'refused', string()} | + {'error', any()}. + +update_state(User = #user{authz_backends = Backends0}, NewState) -> + %% N.B.: we use foldl/3 and prepending, so the final list of + %% backends is in reverse order from the original list. 
+ Backends = lists:foldl( + fun({Module, Impl}, {ok, Acc}) -> + case Module:state_can_expire() of + true -> + case Module:update_state(auth_user(User, Impl), NewState) of + {ok, #auth_user{impl = Impl1}} -> + {ok, [{Module, Impl1} | Acc]}; + Else -> Else + end; + false -> + {ok, [{Module, Impl} | Acc]} + end; + (_, {error, _} = Err) -> Err; + (_, {refused, _, _} = Err) -> Err + end, {ok, []}, Backends0), + case Backends of + {ok, Pairs} -> {ok, User#user{authz_backends = lists:reverse(Pairs)}}; + Else -> Else + end. + +-spec permission_cache_can_expire(User :: rabbit_types:user()) -> boolean(). + +%% Returns true if any of the backends support credential expiration, +%% otherwise returns false. +permission_cache_can_expire(#user{authz_backends = Backends}) -> + lists:any(fun ({Module, _State}) -> Module:state_can_expire() end, Backends). diff --git a/src/rabbit_amqqueue.erl b/src/rabbit_amqqueue.erl index bfacef6d49..cbe8738c5a 100644 --- a/src/rabbit_amqqueue.erl +++ b/src/rabbit_amqqueue.erl @@ -52,8 +52,6 @@ -export([pid_of/1, pid_of/2]). -export([mark_local_durable_queues_stopped/1]). --deprecated([{force_event_refresh, 1, eventually}]). - %% internal -export([internal_declare/2, internal_delete/2, run_backing_queue/3, set_ram_duration_target/2, set_maximum_since_use/2, @@ -1037,6 +1035,9 @@ list_local(VHostPath) -> -spec force_event_refresh(reference()) -> 'ok'. 
+% Note: https://www.pivotaltracker.com/story/show/166962656 +% This event is necessary for the stats timer to be initialized with +% the correct values once the management agent has started force_event_refresh(Ref) -> [gen_server2:cast(amqqueue:get_pid(Q), {force_event_refresh, Ref}) || Q <- list()], diff --git a/src/rabbit_amqqueue_process.erl b/src/rabbit_amqqueue_process.erl index 2185d7c95f..46db98ba5a 100644 --- a/src/rabbit_amqqueue_process.erl +++ b/src/rabbit_amqqueue_process.erl @@ -1615,6 +1615,9 @@ handle_cast({credit, ChPid, CTag, Credit, Drain}, run_message_queue(true, State1) end); +% Note: https://www.pivotaltracker.com/story/show/166962656 +% This event is necessary for the stats timer to be initialized with +% the correct values once the management agent has started handle_cast({force_event_refresh, Ref}, State = #q{consumers = Consumers, active_consumer = Holder}) -> diff --git a/src/rabbit_auth_backend_internal.erl b/src/rabbit_auth_backend_internal.erl index e16b14734b..e675ad188b 100644 --- a/src/rabbit_auth_backend_internal.erl +++ b/src/rabbit_auth_backend_internal.erl @@ -40,6 +40,8 @@ list_user_vhost_permissions/2, list_user_topic_permissions/1, list_vhost_topic_permissions/1, list_user_vhost_topic_permissions/2]). +-export([state_can_expire/0]). + %% for testing -export([hashing_module_for_user/1, expand_topic_permission/2]). @@ -93,6 +95,8 @@ user_login_authentication(Username, AuthProps) -> false -> exit({unknown_auth_props, Username, AuthProps}) end. +state_can_expire() -> false. 
+ user_login_authorization(Username, _AuthProps) -> case user_login_authentication(Username, []) of {ok, #auth_user{impl = Impl, tags = Tags}} -> {ok, Impl, Tags}; @@ -123,7 +127,7 @@ check_vhost_access(#auth_user{username = Username}, VHostPath, _AuthzData) -> check_resource_access(#auth_user{username = Username}, #resource{virtual_host = VHostPath, name = Name}, - Permission, + Permission, _AuthContext) -> case mnesia:dirty_read({rabbit_user_permission, #user_vhost{username = Username, diff --git a/src/rabbit_auth_mechanism_plain.erl b/src/rabbit_auth_mechanism_plain.erl index cfc1a0ce18..706e5eedfa 100644 --- a/src/rabbit_auth_mechanism_plain.erl +++ b/src/rabbit_auth_mechanism_plain.erl @@ -31,9 +31,6 @@ %% SASL PLAIN, as used by the Qpid Java client and our clients. Also, %% apparently, by OpenAMQ. -%% TODO: reimplement this using the binary module? - that makes use of -%% BIFs to do binary matching and will thus be much faster. - description() -> [{description, <<"SASL PLAIN authentication mechanism">>}]. diff --git a/src/rabbit_channel.erl b/src/rabbit_channel.erl index f5c9e8dfce..d16929d962 100644 --- a/src/rabbit_channel.erl +++ b/src/rabbit_channel.erl @@ -63,15 +63,13 @@ -export([refresh_config_local/0, ready_for_close/1]). -export([refresh_interceptors/0]). -export([force_event_refresh/1]). --export([source/2]). +-export([source/2, update_user_state/2]). -export([init/1, terminate/2, code_change/3, handle_call/3, handle_cast/2, handle_info/2, handle_pre_hibernate/1, handle_post_hibernate/1, prioritise_call/4, prioritise_cast/3, prioritise_info/3, format_message_queue/2]). --deprecated([{force_event_refresh, 1, eventually}]). - %% Internal -export([list_local/0, emit_info_local/3, deliver_reply_local/3]). -export([get_vhost/1, get_user/1]). @@ -452,6 +450,9 @@ ready_for_close(Pid) -> -spec force_event_refresh(reference()) -> 'ok'. 
+% Note: https://www.pivotaltracker.com/story/show/166962656 +% This event is necessary for the stats timer to be initialized with +% the correct values once the management agent has started force_event_refresh(Ref) -> [gen_server2:cast(C, {force_event_refresh, Ref}) || C <- list()], ok. @@ -459,11 +460,21 @@ force_event_refresh(Ref) -> list_queue_states(Pid) -> gen_server2:call(Pid, list_queue_states). --spec source(pid(), any()) -> any(). +-spec source(pid(), any()) -> 'ok' | {error, channel_terminated}. source(Pid, Source) when is_pid(Pid) -> case erlang:is_process_alive(Pid) of - true -> Pid ! {channel_source, Source}; + true -> Pid ! {channel_source, Source}, + ok; + false -> {error, channel_terminated} + end. + +-spec update_user_state(pid(), rabbit_types:auth_user()) -> 'ok' | {error, channel_terminated}. + +update_user_state(Pid, UserState) when is_pid(Pid) -> + case erlang:is_process_alive(Pid) of + true -> Pid ! {update_user_state, UserState}, + ok; false -> {error, channel_terminated} end. @@ -489,6 +500,8 @@ init([Channel, ReaderPid, WriterPid, ConnPid, ConnName, Protocol, User, VHost, _ -> Limiter0 end, + %% Process dictionary is used here because permission cache already uses it. MK. 
+ put(permission_cache_can_expire, rabbit_access_control:permission_cache_can_expire(User)), MaxMessageSize = get_max_message_size(), ConsumerTimeout = get_consumer_timeout(), State = #ch{cfg = #conf{state = starting, @@ -691,6 +704,9 @@ handle_cast({send_drained, CTagCredit}, || {ConsumerTag, CreditDrained} <- CTagCredit], noreply(State); +% Note: https://www.pivotaltracker.com/story/show/166962656 +% This event is necessary for the stats timer to be initialized with +% the correct values once the management agent has started handle_cast({force_event_refresh, Ref}, State) -> rabbit_event:notify(channel_created, infos(?CREATION_EVENT_KEYS, State), Ref), @@ -838,6 +854,10 @@ handle_info({{Ref, Node}, LateAnswer}, noreply(State); handle_info(tick, State0 = #ch{queue_states = QueueStates0}) -> + case get(permission_cache_can_expire) of + true -> ok = clear_permission_cache(); + _ -> ok + end, QueueStates1 = maps:filter(fun(_, QS) -> QName = rabbit_quorum_queue:queue_name(QS), @@ -850,7 +870,10 @@ handle_info(tick, State0 = #ch{queue_states = QueueStates0}) -> Return end; handle_info({channel_source, Source}, State = #ch{cfg = Cfg}) -> - noreply(State#ch{cfg = Cfg#conf{source = Source}}). + noreply(State#ch{cfg = Cfg#conf{source = Source}}); +handle_info({update_user_state, User}, State = #ch{cfg = Cfg}) -> + noreply(State#ch{cfg = Cfg#conf{user = User}}). + handle_pre_hibernate(State0) -> ok = clear_permission_cache(), @@ -973,7 +996,7 @@ return_queue_declare_ok(#resource{name = ActualName}, check_resource_access(User, Resource, Perm, Context) -> V = {Resource, Context, Perm}, - + Cache = case get(permission_cache) of undefined -> []; Other -> Other @@ -1051,8 +1074,8 @@ check_topic_authorisation(_, _, _, _, _, _) -> ok. 
check_topic_authorisation(#exchange{name = Name = #resource{virtual_host = VHost}, type = topic}, - User = #user{username = Username}, - AmqpParams, RoutingKey, Permission) -> + User = #user{username = Username}, + AmqpParams, RoutingKey, Permission) -> Resource = Name#resource{kind = topic}, VariableMap = build_topic_variable_map(AmqpParams, VHost, Username), Context = #{routing_key => RoutingKey, diff --git a/src/rabbit_direct.erl b/src/rabbit_direct.erl index b918d2e671..b84b4d91bb 100644 --- a/src/rabbit_direct.erl +++ b/src/rabbit_direct.erl @@ -132,7 +132,9 @@ extract_protocol(Infos) -> maybe_call_connection_info_module(Protocol, Creds, VHost, Pid, Infos) -> Module = rabbit_data_coercion:to_atom(string:to_lower( - "rabbit_" ++ rabbit_data_coercion:to_list(Protocol) ++ "_connection_info") + "rabbit_" ++ + lists:flatten(string:replace(rabbit_data_coercion:to_list(Protocol), " ", "_", all)) ++ + "_connection_info") ), Args = [Creds, VHost, Pid, Infos], code_server_cache:maybe_call_mfa(Module, additional_authn_params, Args, []). diff --git a/src/rabbit_fifo.erl b/src/rabbit_fifo.erl index d9566ea8f7..cfb1ac7fda 100644 --- a/src/rabbit_fifo.erl +++ b/src/rabbit_fifo.erl @@ -64,7 +64,9 @@ make_credit/4, make_purge/0, make_purge_nodes/1, - make_update_config/1 + make_update_config/1, + + from_log/2 ]). %% command records representing all the protocol actions that are supported @@ -754,6 +756,24 @@ usage(Name) when is_atom(Name) -> [{_, Use}] -> Use end. +from_log(Log, State0) -> + lists:foldl( + fun ({Idx, Term, {'$usr', Meta0, Cmd, _}}, {S0, Effs}) -> + Meta = Meta0#{index => Idx, + term => Term}, + case apply(Meta, Cmd, S0) of + {S, _, E} when is_list(E) -> + {S, Effs ++ E}; + {S, _, E} -> + {S, Effs ++ [E]}; + {S, _} -> + {S, Effs} + end; + (_, Acc) -> + Acc + end, {State0, []}, Log). 
+ + %%% Internal messages_ready(#?MODULE{messages = M, diff --git a/src/rabbit_log_tail.erl b/src/rabbit_log_tail.erl new file mode 100644 index 0000000000..555164213b --- /dev/null +++ b/src/rabbit_log_tail.erl @@ -0,0 +1,95 @@ +-module(rabbit_log_tail). + +-export([tail_n_lines/2]). +-export([init_tail_stream/4]). + +-define(GUESS_OFFSET, 200). + +init_tail_stream(Filename, Pid, Ref, Duration) -> + RPCProc = self(), + Reader = spawn(fun() -> + link(Pid), + case file:open(Filename, [read, binary]) of + {ok, File} -> + TimeLimit = case Duration of + infinity -> infinity; + _ -> erlang:system_time(second) + Duration + end, + {ok, _} = file:position(File, eof), + RPCProc ! {Ref, opened}, + read_loop(File, Pid, Ref, TimeLimit); + {error, _} = Err -> + RPCProc ! {Ref, Err} + end + end), + receive + {Ref, opened} -> {ok, Ref}; + {Ref, {error, Err}} -> {error, Err} + after 5000 -> + exit(Reader, timeout), + {error, timeout} + end. + +read_loop(File, Pid, Ref, TimeLimit) -> + case is_integer(TimeLimit) andalso erlang:system_time(second) > TimeLimit of + true -> Pid ! {Ref, <<>>, finished}; + false -> + case file:read(File, ?GUESS_OFFSET) of + {ok, Data} -> + Pid ! {Ref, Data, confinue}, + read_loop(File, Pid, Ref, TimeLimit); + eof -> + timer:sleep(1000), + read_loop(File, Pid, Ref, TimeLimit); + {error, _} = Err -> + Pid ! {Ref, Err, finished} + end + end. + +tail_n_lines(Filename, N) -> + case file:open(Filename, [read, binary]) of + {ok, File} -> + {ok, Eof} = file:position(File, eof), + %% Eof may move. Only read up to the current one. + Result = reverse_read_n_lines(N, N, File, Eof, Eof), + file:close(File), + Result; + {error, _} = Error -> Error + end. 
+ +reverse_read_n_lines(N, OffsetN, File, Position, Eof) -> + GuessPosition = offset(Position, OffsetN), + case read_lines_from_position(File, GuessPosition, Eof) of + {ok, Lines} -> + NLines = length(Lines), + case {NLines >= N, GuessPosition == 0} of + %% Take only N lines if there is more + {true, _} -> lists:nthtail(NLines - N, Lines); + %% Safe to assume that NLines is less than N + {_, true} -> Lines; + %% Adjust position + _ -> + reverse_read_n_lines(N, N - NLines + 1, File, GuessPosition, Eof) + end; + {error, _} = Error -> Error + end. + +read_from_position(File, GuessPosition, Eof) -> + file:pread(File, GuessPosition, max(0, Eof - GuessPosition)). + +read_lines_from_position(File, GuessPosition, Eof) -> + case read_from_position(File, GuessPosition, Eof) of + {ok, Data} -> + Lines = binary:split(Data, <<"\n">>, [global, trim]), + case {GuessPosition, Lines} of + %% If position is 0 - there are no partial lines + {0, _} -> {ok, Lines}; + %% Remove first line as it can be partial + {_, [_ | Rest]} -> {ok, Rest}; + {_, []} -> {ok, []} + end; + {error, _} = Error -> Error + end. + +offset(Base, N) -> + max(0, Base - N * ?GUESS_OFFSET).
\ No newline at end of file diff --git a/src/rabbit_mnesia.erl b/src/rabbit_mnesia.erl index 162badc33d..bd1c26f20f 100644 --- a/src/rabbit_mnesia.erl +++ b/src/rabbit_mnesia.erl @@ -45,7 +45,10 @@ %% Hooks used in `rabbit_node_monitor' on_node_up/1, - on_node_down/1 + on_node_down/1, + + %% Helpers for diagnostics commands + schema_info/1 ]). %% Used internally in rpc calls @@ -755,6 +758,18 @@ running_disc_nodes() -> ordsets:from_list(RunningNodes))). %%-------------------------------------------------------------------- +%% Helpers for diagnostics commands +%%-------------------------------------------------------------------- + +schema_info(Items) -> + Tables = mnesia:system_info(tables), + [info(Table, Items) || Table <- Tables]. + +info(Table, Items) -> + All = [{name, Table} | mnesia:table_info(Table, all)], + [{Item, proplists:get_value(Item, All)} || Item <- Items]. + +%%-------------------------------------------------------------------- %% Internal helpers %%-------------------------------------------------------------------- diff --git a/src/rabbit_quorum_queue.erl b/src/rabbit_quorum_queue.erl index 75bd9515f1..eb34b9db9f 100644 --- a/src/rabbit_quorum_queue.erl +++ b/src/rabbit_quorum_queue.erl @@ -32,7 +32,7 @@ -export([rpc_delete_metrics/1]). -export([format/1]). -export([open_files/1]). --export([add_member/3]). +-export([add_member/4]). -export([delete_member/3]). -export([requeue/3]). -export([policy_changed/2]). @@ -69,6 +69,7 @@ -define(RPC_TIMEOUT, 1000). -define(TICK_TIMEOUT, 5000). %% the ra server tick time -define(DELETE_TIMEOUT, 5000). +-define(ADD_MEMBER_TIMEOUT, 5000). 
%%---------------------------------------------------------------------------- @@ -119,23 +120,9 @@ declare(Q) when ?amqqueue_is_quorum(Q) -> NewQ1 = amqqueue:set_quorum_nodes(NewQ0, Nodes), case rabbit_amqqueue:internal_declare(NewQ1, false) of {created, NewQ} -> - RaMachine = ra_machine(NewQ), - ServerIds = [{RaName, Node} || Node <- Nodes], - ClusterName = RaName, TickTimeout = application:get_env(rabbit, quorum_tick_interval, ?TICK_TIMEOUT), - RaConfs = [begin - UId = ra:new_uid(ra_lib:to_binary(ClusterName)), - FName = rabbit_misc:rs(QName), - #{cluster_name => ClusterName, - id => ServerId, - uid => UId, - friendly_name => FName, - initial_members => ServerIds, - log_init_args => #{uid => UId}, - tick_timeout => TickTimeout, - machine => RaMachine} - end || ServerId <- ServerIds], - + RaConfs = [make_ra_conf(NewQ, ServerId, TickTimeout) + || ServerId <- members(NewQ)], case ra:start_cluster(RaConfs) of {ok, _, _} -> rabbit_event:notify(queue_created, @@ -323,7 +310,6 @@ reductions(Name) -> recover(Queues) -> [begin {Name, _} = amqqueue:get_pid(Q0), - Nodes = amqqueue:get_quorum_nodes(Q0), case ra:restart_server({Name, node()}) of ok -> % queue was restarted, good @@ -333,10 +319,12 @@ recover(Queues) -> Err1 == name_not_registered -> % queue was never started on this node % so needs to be started from scratch. - Machine = ra_machine(Q0), - RaNodes = [{Name, Node} || Node <- Nodes], - case ra:start_server(Name, {Name, node()}, Machine, RaNodes) of - ok -> ok; + TickTimeout = application:get_env(rabbit, quorum_tick_interval, + ?TICK_TIMEOUT), + Conf = make_ra_conf(Q0, {Name, node()}, TickTimeout), + case ra:start_server(Conf) of + ok -> + ok; Err2 -> rabbit_log:warning("recover: quorum queue ~w could not" " be started ~w", [Name, Err2]), @@ -698,7 +686,7 @@ get_sys_status(Proc) -> end. 
-add_member(VHost, Name, Node) -> +add_member(VHost, Name, Node, Timeout) -> QName = #resource{virtual_host = VHost, name = Name, kind = queue}, case rabbit_amqqueue:lookup(QName) of {ok, Q} when ?amqqueue_is_classic(Q) -> @@ -714,23 +702,25 @@ add_member(VHost, Name, Node) -> %% idempotent by design ok; false -> - add_member(Q, Node) + add_member(Q, Node, Timeout) end end; {error, not_found} = E -> E end. -add_member(Q, Node) when ?amqqueue_is_quorum(Q) -> - {RaName, _} = ServerRef = amqqueue:get_pid(Q), +add_member(Q, Node, Timeout) when ?amqqueue_is_quorum(Q) -> + {RaName, _} = amqqueue:get_pid(Q), QName = amqqueue:get_name(Q), - QNodes = amqqueue:get_quorum_nodes(Q), %% TODO parallel calls might crash this, or add a duplicate in quorum_nodes ServerId = {RaName, Node}, - case ra:start_server(RaName, ServerId, ra_machine(Q), - [{RaName, N} || N <- QNodes]) of + Members = members(Q), + TickTimeout = application:get_env(rabbit, quorum_tick_interval, + ?TICK_TIMEOUT), + Conf = make_ra_conf(Q, ServerId, TickTimeout), + case ra:start_server(Conf) of ok -> - case ra:add_member(ServerRef, ServerId) of + case ra:add_member(Members, ServerId, Timeout) of {ok, _, Leader} -> Fun = fun(Q1) -> Q2 = amqqueue:set_quorum_nodes( @@ -742,9 +732,11 @@ add_member(Q, Node) when ?amqqueue_is_quorum(Q) -> fun() -> rabbit_amqqueue:update(QName, Fun) end), ok; {timeout, _} -> + _ = ra:force_delete_server(ServerId), + _ = ra:remove_member(Members, ServerId), {error, timeout}; E -> - %% TODO should we stop the ra process here? + _ = ra:force_delete_server(ServerId), E end; E -> @@ -769,16 +761,18 @@ delete_member(VHost, Name, Node) -> E end. 
+ delete_member(Q, Node) when ?amqqueue_is_quorum(Q) -> QName = amqqueue:get_name(Q), {RaName, _} = amqqueue:get_pid(Q), ServerId = {RaName, Node}, - case amqqueue:get_quorum_nodes(Q) of - [Node] -> + case members(Q) of + [{_, Node}] -> + %% deleting the last member is not allowed {error, last_node}; - _ -> - case ra:leave_and_delete_server(amqqueue:get_pid(Q), ServerId) of + Members -> + case ra:leave_and_delete_server(Members, ServerId) of ok -> Fun = fun(Q1) -> amqqueue:set_quorum_nodes( @@ -827,7 +821,7 @@ grow(Node, VhostSpec, QueueSpec, Strategy) -> QName = amqqueue:get_name(Q), rabbit_log:info("~s: adding a new member (replica) on node ~w", [rabbit_misc:rs(QName), Node]), - case add_member(Q, Node) of + case add_member(Q, Node, ?ADD_MEMBER_TIMEOUT) of ok -> {QName, {ok, Size + 1}}; {error, Err} -> @@ -1142,3 +1136,26 @@ select_quorum_nodes(0, _, Selected) -> select_quorum_nodes(Size, Rest, Selected) -> S = lists:nth(rand:uniform(length(Rest)), Rest), select_quorum_nodes(Size - 1, lists:delete(S, Rest), [S | Selected]). + +%% member with the current leader first +members(Q) when ?amqqueue_is_quorum(Q) -> + {RaName, LeaderNode} = amqqueue:get_pid(Q), + Nodes = lists:delete(LeaderNode, amqqueue:get_quorum_nodes(Q)), + [{RaName, N} || N <- [LeaderNode | Nodes]]. + +make_ra_conf(Q, ServerId, TickTimeout) -> + QName = amqqueue:get_name(Q), + RaMachine = ra_machine(Q), + [{ClusterName, _} | _] = Members = members(Q), + UId = ra:new_uid(ra_lib:to_binary(ClusterName)), + FName = rabbit_misc:rs(QName), + #{cluster_name => ClusterName, + id => ServerId, + uid => UId, + friendly_name => FName, + metrics_key => QName, + initial_members => Members, + log_init_args => #{uid => UId}, + tick_timeout => TickTimeout, + machine => RaMachine}. + diff --git a/src/rabbit_reader.erl b/src/rabbit_reader.erl index 8f64a70b5f..39ac0ef8ac 100644 --- a/src/rabbit_reader.erl +++ b/src/rabbit_reader.erl @@ -66,8 +66,6 @@ -export([conserve_resources/3, server_properties/1]). 
--deprecated([{force_event_refresh, 2, eventually}]). - -define(NORMAL_TIMEOUT, 3). -define(CLOSING_TIMEOUT, 30). -define(CHANNEL_TERMINATION_TIMEOUT, 3). @@ -220,6 +218,9 @@ info(Pid, Items) -> -spec force_event_refresh(pid(), reference()) -> 'ok'. +% Note: https://www.pivotaltracker.com/story/show/166962656 +% This event is necessary for the stats timer to be initialized with +% the correct values once the management agent has started force_event_refresh(Pid, Ref) -> gen_server:cast(Pid, {force_event_refresh, Ref}). @@ -1273,6 +1274,44 @@ handle_method0(#'connection.close_ok'{}, State = #v1{connection_state = closed}) -> self() ! terminate_connection, State; +handle_method0(#'connection.update_secret'{new_secret = NewSecret, reason = Reason}, + State = #v1{connection = + #connection{protocol = Protocol, + user = User = #user{username = Username}, + log_name = ConnName} = Conn, + sock = Sock}) when ?IS_RUNNING(State) -> + rabbit_log_connection:debug( + "connection ~p (~s) of user '~s': " + "asked to update secret, reason: ~s~n", + [self(), dynamic_connection_name(ConnName), Username, Reason]), + case rabbit_access_control:update_state(User, NewSecret) of + {ok, User1} -> + %% User/auth backend state has been updated. Now we can propagate it to channels + %% asynchronously and return. All the channels have to do is to update their + %% own state. + %% + %% Any secret update errors coming from the authz backend will be handled in the other branch. + %% Therefore we optimistically do no error handling here. MK. 
+ lists:foreach(fun(Ch) -> + rabbit_log:debug("Updating user/auth backend state for channel ~p", [Ch]), + _ = rabbit_channel:update_user_state(Ch, User1) + end, all_channels()), + ok = send_on_channel0(Sock, #'connection.update_secret_ok'{}, Protocol), + rabbit_log_connection:info( + "connection ~p (~s): " + "user '~s' updated secret, reason: ~s~n", + [self(), dynamic_connection_name(ConnName), Username, Reason]), + State#v1{connection = Conn#connection{user = User1}}; + {refused, Message} -> + rabbit_log_connection:error("Secret update was refused for user '~p': ~p", + [Username, Message]), + rabbit_misc:protocol_error(not_allowed, "New secret was refused by one of the backends", []); + {error, Message} -> + rabbit_log_connection:error("Secret update for user '~p' failed: ~p", + [Username, Message]), + rabbit_misc:protocol_error(not_allowed, + "Secret update failed", []) + end; handle_method0(_Method, State) when ?IS_STOPPING(State) -> State; handle_method0(_Method, #v1{connection_state = S}) -> diff --git a/test/channel_source_SUITE.erl b/test/channel_source_SUITE.erl index 5211dd7510..d56e9fed0f 100644 --- a/test/channel_source_SUITE.erl +++ b/test/channel_source_SUITE.erl @@ -135,8 +135,19 @@ undefined_channel_source(Config) -> undefined_channel_source1(_Config) -> ExistingChannels = rabbit_channel:list(), {_Writer, _Limiter, ServerCh} = rabbit_ct_broker_helpers:test_channel(), - [ServerCh] = rabbit_channel:list() -- ExistingChannels, + wait_for_server_channel(ExistingChannels, ServerCh, 60), [{source, undefined}] = rabbit_channel:info(ServerCh, [source]), _ = rabbit_channel:source(ServerCh, ?MODULE), [{source, ?MODULE}] = rabbit_channel:info(ServerCh, [source]), passed. 
+ +wait_for_server_channel(ExistingChannels, ServerCh, 0) -> + [ServerCh] = rabbit_channel:list() -- ExistingChannels; +wait_for_server_channel(ExistingChannels, ServerCh, Attempts) -> + case rabbit_channel:list() -- ExistingChannels of + [ServerCh] -> + ok; + _ -> + timer:sleep(1000), + wait_for_server_channel(ExistingChannels, ServerCh, Attempts - 1) + end. diff --git a/test/config_schema_SUITE_data/rabbit.snippets b/test/config_schema_SUITE_data/rabbit.snippets index 9afddd9b1e..4e34738a60 100644 --- a/test/config_schema_SUITE_data/rabbit.snippets +++ b/test/config_schema_SUITE_data/rabbit.snippets @@ -566,6 +566,14 @@ credential_validator.regexp = ^abc\\d+", [{rabbit,[{log, [{categories, [{connection, [{file, "file_name_connection"}]}, {channel, [{file, "file_name_channel"}]}]}]}]}], []}, + + {default_worker_pool_size, + "default_worker_pool_size = 512", + [{rabbit, [ + {default_worker_pool_size, 512} + ]}], + []}, + {delegate_count, "delegate_count = 64", [{rabbit, [ diff --git a/test/quorum_queue_SUITE.erl b/test/quorum_queue_SUITE.erl index 1d9789fe89..cc0e7aa75a 100644 --- a/test/quorum_queue_SUITE.erl +++ b/test/quorum_queue_SUITE.erl @@ -1222,7 +1222,7 @@ add_member_not_running(Config) -> declare(Ch, QQ, [{<<"x-queue-type">>, longstr, <<"quorum">>}])), ?assertEqual({error, node_not_running}, rpc:call(Server, rabbit_quorum_queue, add_member, - [<<"/">>, QQ, 'rabbit@burrow'])). + [<<"/">>, QQ, 'rabbit@burrow', 5000])). add_member_classic(Config) -> [Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename), @@ -1231,7 +1231,7 @@ add_member_classic(Config) -> ?assertEqual({'queue.declare_ok', CQ, 0, 0}, declare(Ch, CQ, [])), ?assertEqual({error, classic_queue_not_supported}, rpc:call(Server, rabbit_quorum_queue, add_member, - [<<"/">>, CQ, Server])). + [<<"/">>, CQ, Server, 5000])). 
add_member_already_a_member(Config) -> [Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename), @@ -1242,14 +1242,14 @@ add_member_already_a_member(Config) -> %% idempotent by design ?assertEqual(ok, rpc:call(Server, rabbit_quorum_queue, add_member, - [<<"/">>, QQ, Server])). + [<<"/">>, QQ, Server, 5000])). add_member_not_found(Config) -> [Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename), QQ = ?config(queue_name, Config), ?assertEqual({error, not_found}, rpc:call(Server, rabbit_quorum_queue, add_member, - [<<"/">>, QQ, Server])). + [<<"/">>, QQ, Server, 5000])). add_member(Config) -> [Server0, Server1] = Servers0 = @@ -1260,12 +1260,12 @@ add_member(Config) -> declare(Ch, QQ, [{<<"x-queue-type">>, longstr, <<"quorum">>}])), ?assertEqual({error, node_not_running}, rpc:call(Server0, rabbit_quorum_queue, add_member, - [<<"/">>, QQ, Server1])), + [<<"/">>, QQ, Server1, 5000])), ok = rabbit_control_helper:command(stop_app, Server1), ok = rabbit_control_helper:command(join_cluster, Server1, [atom_to_list(Server0)], []), rabbit_control_helper:command(start_app, Server1), ?assertEqual(ok, rpc:call(Server0, rabbit_quorum_queue, add_member, - [<<"/">>, QQ, Server1])), + [<<"/">>, QQ, Server1, 5000])), Info = rpc:call(Server0, rabbit_quorum_queue, infos, [rabbit_misc:r(<<"/">>, queue, QQ)]), Servers = lists:sort(Servers0), diff --git a/test/simple_ha_SUITE.erl b/test/simple_ha_SUITE.erl index b2caff86a9..d2137a686b 100644 --- a/test/simple_ha_SUITE.erl +++ b/test/simple_ha_SUITE.erl @@ -18,6 +18,7 @@ -include_lib("common_test/include/ct.hrl"). -include_lib("amqp_client/include/amqp_client.hrl"). +-include_lib("eunit/include/eunit.hrl"). -compile(export_all). 
@@ -39,7 +40,8 @@ groups() -> {cluster_size_2, [], [ rapid_redeclare, declare_synchrony, - clean_up_exclusive_queues + clean_up_exclusive_queues, + clean_up_and_redeclare_exclusive_queues_on_other_nodes ]}, {cluster_size_3, [], [ consume_survives_stop, @@ -160,6 +162,43 @@ clean_up_exclusive_queues(Config) -> [[],[]] = rabbit_ct_broker_helpers:rpc_all(Config, rabbit_amqqueue, list, []), ok. +clean_up_and_redeclare_exclusive_queues_on_other_nodes(Config) -> + QueueCount = 10, + QueueNames = lists:map(fun(N) -> + NBin = erlang:integer_to_binary(N), + <<"exclusive-q-", NBin/binary>> + end, lists:seq(1, QueueCount)), + [A, B] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename), + Conn = rabbit_ct_client_helpers:open_unmanaged_connection(Config, A), + {ok, Ch} = amqp_connection:open_channel(Conn), + + LocationMinMasters = [ + {<<"x-queue-master-locator">>, longstr, <<"min-masters">>} + ], + lists:foreach(fun(QueueName) -> + declare_exclusive(Ch, QueueName, LocationMinMasters), + subscribe(Ch, QueueName) + end, QueueNames), + + ok = rabbit_ct_broker_helpers:kill_node(Config, B), + + Cancels = receive_cancels([]), + ?assert(length(Cancels) > 0), + + RemaniningQueues = rabbit_ct_broker_helpers:rpc(Config, A, rabbit_amqqueue, list, []), + + ?assertEqual(length(RemaniningQueues), QueueCount - length(Cancels)), + + lists:foreach(fun(QueueName) -> + declare_exclusive(Ch, QueueName, LocationMinMasters), + true = rabbit_ct_client_helpers:publish(Ch, QueueName, 1), + subscribe(Ch, QueueName) + end, QueueNames), + Messages = receive_messages([]), + ?assertEqual(10, length(Messages)), + ok = rabbit_ct_client_helpers:close_connection(Conn). + + consume_survives_stop(Cf) -> consume_survives(Cf, fun stop/2, true). consume_survives_sigkill(Cf) -> consume_survives(Cf, fun sigkill/2, true). consume_survives_policy(Cf) -> consume_survives(Cf, fun policy/2, true). 
@@ -286,3 +325,32 @@ open_incapable_channel(NodePort) -> client_properties = Props}), {ok, Ch} = amqp_connection:open_channel(ConsConn), Ch. + +declare_exclusive(Ch, QueueName, Args) -> + Declare = #'queue.declare'{queue = QueueName, + exclusive = true, + arguments = Args + }, + #'queue.declare_ok'{} = amqp_channel:call(Ch, Declare). + +subscribe(Ch, QueueName) -> + ConsumeOk = amqp_channel:call(Ch, #'basic.consume'{queue = QueueName, + no_ack = true}), + #'basic.consume_ok'{} = ConsumeOk, + receive ConsumeOk -> ok after ?DELAY -> throw(consume_ok_timeout) end. + +receive_cancels(Cancels) -> + receive + #'basic.cancel'{} = C -> + receive_cancels([C|Cancels]) + after ?DELAY -> + Cancels + end. + +receive_messages(All) -> + receive + {#'basic.deliver'{}, Msg} -> + receive_messages([Msg|All]) + after ?DELAY -> + lists:reverse(All) + end. diff --git a/test/test_util.erl b/test/test_util.erl index 7fcf247898..9a82b0ea1c 100644 --- a/test/test_util.erl +++ b/test/test_util.erl @@ -10,8 +10,19 @@ fake_pid(Node) -> ThisNodeSize = size(term_to_binary(node())) + 1, Pid = spawn(fun () -> ok end), %% drop the local node data from a local pid - <<_:ThisNodeSize/binary, LocalPidData/binary>> = term_to_binary(Pid), + <<Pre:ThisNodeSize/binary, LocalPidData/binary>> = term_to_binary(Pid), S = size(NodeBin), + %% get the encoding type of the pid + <<_:8, Type:8/unsigned, _/binary>> = Pre, %% replace it with the incoming node binary - Final = <<131,103, 100, S:16/unsigned, NodeBin/binary, LocalPidData/binary>>, + Final = <<131, Type, 100, S:16/unsigned, NodeBin/binary, LocalPidData/binary>>, binary_to_term(Final). + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + +fake_pid_test() -> + _ = fake_pid(banana), + ok. + +-endif. 
diff --git a/test/unit_SUITE.erl b/test/unit_SUITE.erl index b19289e818..6582c24b1e 100644 --- a/test/unit_SUITE.erl +++ b/test/unit_SUITE.erl @@ -32,7 +32,9 @@ all() -> groups() -> [ {parallel_tests, [parallel], [ - auth_backend_internal_expand_topic_permission, + {access_control, [parallel], [ + auth_backend_internal_expand_topic_permission + ]}, {basic_header_handling, [parallel], [ write_table_with_invalid_existing_type, invalid_existing_headers, @@ -893,6 +895,10 @@ listing_plugins_from_multiple_directories(Config) -> end, ok. +%% +%% Access Control +%% + auth_backend_internal_expand_topic_permission(_Config) -> ExpandMap = #{<<"username">> => <<"guest">>, <<"vhost">> => <<"default">>}, %% simple case diff --git a/test/unit_inbroker_non_parallel_SUITE.erl b/test/unit_inbroker_non_parallel_SUITE.erl index 039f87d5b3..4f11adefbc 100644 --- a/test/unit_inbroker_non_parallel_SUITE.erl +++ b/test/unit_inbroker_non_parallel_SUITE.erl @@ -16,6 +16,7 @@ -module(unit_inbroker_non_parallel_SUITE). +-include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("kernel/include/file.hrl"). @@ -575,8 +576,9 @@ head_message_timestamp_statistics(Config) -> ?MODULE, head_message_timestamp1, [Config]). 
head_message_timestamp1(_Config) -> - %% Can't find a way to receive the ack here so can't test pending acks status - + %% there is no convenient rabbit_channel API for confirms + %% this test could use, so it relies on tx.* methods + %% and gen_server2 flushing application:set_env(rabbit, collect_statistics, fine), %% Set up a channel and queue @@ -597,6 +599,11 @@ head_message_timestamp1(_Config) -> Event1 = test_queue_statistics_receive_event(QPid, fun (E) -> proplists:get_value(name, E) == QRes end), '' = proplists:get_value(head_message_timestamp, Event1), + rabbit_channel:do(Ch, #'tx.select'{}), + receive #'tx.select_ok'{} -> ok + after ?TIMEOUT -> throw(failed_to_receive_tx_select_ok) + end, + %% Publish two messages and check timestamp is that of first message rabbit_channel:do(Ch, #'basic.publish'{exchange = <<"">>, routing_key = QName}, @@ -604,13 +611,18 @@ head_message_timestamp1(_Config) -> rabbit_channel:do(Ch, #'basic.publish'{exchange = <<"">>, routing_key = QName}, rabbit_basic:build_content(#'P_basic'{timestamp = 2}, <<"">>)), + rabbit_channel:do(Ch, #'tx.commit'{}), + rabbit_channel:flush(Ch), + receive #'tx.commit_ok'{} -> ok + after ?TIMEOUT -> throw(failed_to_receive_tx_commit_ok) + end, Event2 = test_queue_statistics_receive_event(QPid, fun (E) -> proplists:get_value(name, E) == QRes end), - 1 = proplists:get_value(head_message_timestamp, Event2), + ?assertEqual(1, proplists:get_value(head_message_timestamp, Event2)), %% Get first message and check timestamp is that of second message rabbit_channel:do(Ch, #'basic.get'{queue = QName, no_ack = true}), Event3 = test_queue_statistics_receive_event(QPid, fun (E) -> proplists:get_value(name, E) == QRes end), - 2 = proplists:get_value(head_message_timestamp, Event3), + ?assertEqual(2, proplists:get_value(head_message_timestamp, Event3)), %% Get second message and check timestamp is empty again rabbit_channel:do(Ch, #'basic.get'{queue = QName, no_ack = true}), diff --git 
a/test/unit_log_config_SUITE.erl b/test/unit_log_config_SUITE.erl index 77ddd8d642..93c050edda 100644 --- a/test/unit_log_config_SUITE.erl +++ b/test/unit_log_config_SUITE.erl @@ -139,6 +139,10 @@ sink_rewrite_sinks() -> [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}]}, + {rabbit_log_shovel_lager_event, + [{handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}, + {rabbit_handlers, + [{lager_forwarder_backend,[lager_event,inherit]}]}]}, {rabbit_log_upgrade_lager_event, [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}]} @@ -228,6 +232,10 @@ sink_handlers_merged_with_lager_extra_sinks_handlers(_) -> [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}]}, + {rabbit_log_shovel_lager_event, + [{handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}, + {rabbit_handlers, + [{lager_forwarder_backend,[lager_event,inherit]}]}]}, {rabbit_log_upgrade_lager_event, [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}]}]), @@ -309,6 +317,11 @@ level_sinks() -> [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}]}, + {rabbit_log_shovel_lager_event, + [{handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}, + {rabbit_handlers, + [{lager_forwarder_backend, + [lager_event,inherit]}]}]}, {rabbit_log_upgrade_lager_event, [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}]} @@ -403,6 +416,10 @@ file_sinks() -> [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}]}, + {rabbit_log_shovel_lager_event, + [{handlers, 
[{lager_forwarder_backend,[lager_event,inherit]}]}, + {rabbit_handlers, + [{lager_forwarder_backend,[lager_event,inherit]}]}]}, {rabbit_log_upgrade_lager_event, [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}]} @@ -640,6 +657,10 @@ default_expected_sinks(UpgradeFile) -> [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}]}, + {rabbit_log_shovel_lager_event, + [{handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}, + {rabbit_handlers, + [{lager_forwarder_backend,[lager_event,inherit]}]}]}, {rabbit_log_upgrade_lager_event, [{handlers, [{lager_file_backend, @@ -708,6 +729,10 @@ tty_expected_sinks() -> [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}]}, + {rabbit_log_shovel_lager_event, + [{handlers, [{lager_forwarder_backend,[lager_event,inherit]}]}, + {rabbit_handlers, + [{lager_forwarder_backend,[lager_event,inherit]}]}]}, {rabbit_log_upgrade_lager_event, [{handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}, {rabbit_handlers,[{lager_forwarder_backend,[lager_event,inherit]}]}]}]. |
